Test Report: Docker_Linux_containerd_arm64 18859

5bbb68fdb343a4fd0bac66b69dd2693514a1fa6d:2024-07-04:35168

Failed tests (11/328)

TestAddons/parallel/Ingress (36.99s)

=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

=== CONT  TestAddons/parallel/Ingress
addons_test.go:209: (dbg) Run:  kubectl --context addons-155517 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:234: (dbg) Run:  kubectl --context addons-155517 replace --force -f testdata/nginx-ingress-v1.yaml
addons_test.go:247: (dbg) Run:  kubectl --context addons-155517 replace --force -f testdata/nginx-pod-svc.yaml
addons_test.go:252: (dbg) TestAddons/parallel/Ingress: waiting 8m0s for pods matching "run=nginx" in namespace "default" ...
helpers_test.go:344: "nginx" [7a02e81d-9d0c-4c14-a0c1-1af4f2572009] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx" [7a02e81d-9d0c-4c14-a0c1-1af4f2572009] Running
addons_test.go:252: (dbg) TestAddons/parallel/Ingress: run=nginx healthy within 9.004330729s
addons_test.go:264: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:288: (dbg) Run:  kubectl --context addons-155517 replace --force -f testdata/ingress-dns-example-v1.yaml
addons_test.go:293: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 ip
addons_test.go:299: (dbg) Run:  nslookup hello-john.test 192.168.49.2
addons_test.go:299: (dbg) Non-zero exit: nslookup hello-john.test 192.168.49.2: exit status 1 (15.071490722s)

-- stdout --
	;; connection timed out; no servers could be reached
	
	

-- /stdout --
addons_test.go:301: failed to nslookup hello-john.test host. args "nslookup hello-john.test 192.168.49.2" : exit status 1
addons_test.go:305: unexpected output from nslookup. stdout: ;; connection timed out; no servers could be reached
stderr: 
addons_test.go:308: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable ingress-dns --alsologtostderr -v=1
addons_test.go:308: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 addons disable ingress-dns --alsologtostderr -v=1: (1.317970964s)
addons_test.go:313: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable ingress --alsologtostderr -v=1
addons_test.go:313: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 addons disable ingress --alsologtostderr -v=1: (7.822558656s)
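
The failing step above is the ingress-dns check: after the ingress curl succeeds, the test points nslookup at the node IP (192.168.49.2) and expects hello-john.test to resolve, but the query timed out after ~15s. A minimal sketch for reproducing the check by hand, assuming the addons-155517 profile from this run is still up with the ingress and ingress-dns addons enabled (every command below is taken from the log above):

	# Re-apply the example ingress-dns manifest, then query the node IP directly.
	IP=$(out/minikube-linux-arm64 -p addons-155517 ip)    # 192.168.49.2 in this run
	kubectl --context addons-155517 replace --force -f testdata/ingress-dns-example-v1.yaml
	nslookup hello-john.test "$IP"    # failed here: ";; connection timed out; no servers could be reached"

A timeout like this usually means nothing answered on port 53 at the node IP when the query ran, i.e. the ingress-dns pod was not yet serving; the post-mortem below captures the cluster state at that point.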
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Ingress]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-155517
helpers_test.go:235: (dbg) docker inspect addons-155517:

-- stdout --
	[
	    {
	        "Id": "3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908",
	        "Created": "2024-07-04T01:08:36.303265492Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1196940,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-07-04T01:08:36.435418596Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:fe62b5a5301065dd92924d274286e0d1b2227c557eb51c213d07169631b2b3f7",
	        "ResolvConfPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hostname",
	        "HostsPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hosts",
	        "LogPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908-json.log",
	        "Name": "/addons-155517",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-155517:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-155517",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908-init/diff:/var/lib/docker/overlay2/04be1cfb4b9b173c47d5bff32a15bd2c62951348a7d8ba248dee1fc574bba292/diff",
	                "MergedDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/merged",
	                "UpperDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/diff",
	                "WorkDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-155517",
	                "Source": "/var/lib/docker/volumes/addons-155517/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-155517",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-155517",
	                "name.minikube.sigs.k8s.io": "addons-155517",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "00149764ac28081fa271644bd8abd017b3859409342eb0cd27d7072d3bc248ad",
	            "SandboxKey": "/var/run/docker/netns/00149764ac28",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33941"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33942"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33945"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33943"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33944"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-155517": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "664a751e340faab9d5bafc3b0d537e0c69162e18e1e2ac12a6117fe790e76074",
	                    "EndpointID": "8e02106e4b600acb13911d96d2636d06ef6a5e9c2d13c8b3302477a5d63292bc",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-155517",
	                        "3fdcd90a73a7"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
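
The NetworkSettings.Ports map in the inspect output above is how the harness reaches the node: each container port is published on 127.0.0.1 under an ephemeral host port (22/tcp maps to 33941 here, which the SSH provisioning steps later in this log connect to). For reference, the same template query the start log itself uses to pull that port looks like:

	docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' addons-155517
	# prints 33941 in this run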
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-155517 -n addons-155517
helpers_test.go:244: <<< TestAddons/parallel/Ingress FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Ingress]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 logs -n 25: (1.485194454s)
helpers_test.go:252: TestAddons/parallel/Ingress logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                                            Args                                             |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-327632                                                                     | download-only-327632   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | download-docker-898650                                                                      |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p download-docker-898650                                                                   | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | binary-mirror-236711                                                                        |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --binary-mirror                                                                             |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35737                                                                      |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-236711                                                                     | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| addons  | enable dashboard -p                                                                         | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| start   | -p addons-155517 --wait=true                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:10 UTC |
	|         | --memory=4000 --alsologtostderr                                                             |                        |         |         |                     |                     |
	|         | --addons=registry                                                                           |                        |         |         |                     |                     |
	|         | --addons=metrics-server                                                                     |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots                                                                    |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver                                                                |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                                                                           |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner                                                                      |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget                                                                   |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher                                                        |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin                                                               |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano                                                              |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	|         | --addons=ingress                                                                            |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                                                                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                                                                             | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:10 UTC | 04 Jul 24 01:10 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| ip      | addons-155517 ip                                                                            | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | registry --alsologtostderr                                                                  |                        |         |         |                     |                     |
	|         | -v=1                                                                                        |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	| ssh     | addons-155517 ssh cat                                                                       | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | /opt/local-path-provisioner/pvc-c05b7d52-4c97-4ba9-8a04-478d46aaf85d_default_test-pvc/file1 |                        |         |         |                     |                     |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:12 UTC |
	|         | storage-provisioner-rancher                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p                                                                    | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable csi-hostpath-driver                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable volumesnapshots                                                                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p                                                                 | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | disable metrics-server                                                                      |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| ssh     | addons-155517 ssh curl -s                                                                   | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | http://127.0.0.1/ -H 'Host:                                                                 |                        |         |         |                     |                     |
	|         | nginx.example.com'                                                                          |                        |         |         |                     |                     |
	| ip      | addons-155517 ip                                                                            | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | ingress-dns --alsologtostderr                                                               |                        |         |         |                     |                     |
	|         | -v=1                                                                                        |                        |         |         |                     |                     |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:15 UTC |
	|         | ingress --alsologtostderr -v=1                                                              |                        |         |         |                     |                     |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 01:08:12
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 01:08:12.192271 1196445 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:08:12.192443 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192469 1196445 out.go:304] Setting ErrFile to fd 2...
	I0704 01:08:12.192488 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192753 1196445 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:08:12.193235 1196445 out.go:298] Setting JSON to false
	I0704 01:08:12.194161 1196445 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":24643,"bootTime":1720030650,"procs":172,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:08:12.194241 1196445 start.go:139] virtualization:  
	I0704 01:08:12.196416 1196445 out.go:177] * [addons-155517] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 01:08:12.199094 1196445 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 01:08:12.199240 1196445 notify.go:220] Checking for updates...
	I0704 01:08:12.202974 1196445 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:08:12.205218 1196445 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:08:12.207279 1196445 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:08:12.209429 1196445 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 01:08:12.211932 1196445 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 01:08:12.213938 1196445 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:08:12.234468 1196445 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:08:12.234611 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.299096 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.289472659 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.299211 1196445 docker.go:295] overlay module found
	I0704 01:08:12.301600 1196445 out.go:177] * Using the docker driver based on user configuration
	I0704 01:08:12.303741 1196445 start.go:297] selected driver: docker
	I0704 01:08:12.303757 1196445 start.go:901] validating driver "docker" against <nil>
	I0704 01:08:12.303770 1196445 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 01:08:12.304380 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.362176 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.352995693 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.362343 1196445 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0704 01:08:12.362580 1196445 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 01:08:12.364680 1196445 out.go:177] * Using Docker driver with root privileges
	I0704 01:08:12.366418 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:12.366456 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:12.366471 1196445 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0704 01:08:12.366574 1196445 start.go:340] cluster config:
	{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:12.368869 1196445 out.go:177] * Starting "addons-155517" primary control-plane node in "addons-155517" cluster
	I0704 01:08:12.371240 1196445 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 01:08:12.373736 1196445 out.go:177] * Pulling base image v0.0.44-1719972989-19184 ...
	I0704 01:08:12.376681 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:12.376741 1196445 preload.go:147] Found local preload: /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	I0704 01:08:12.376753 1196445 cache.go:56] Caching tarball of preloaded images
	I0704 01:08:12.376764 1196445 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 01:08:12.376836 1196445 preload.go:173] Found /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0704 01:08:12.376846 1196445 cache.go:59] Finished verifying existence of preloaded tar for v1.30.2 on containerd
	I0704 01:08:12.377194 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:12.377218 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json: {Name:mk1983fdaacaaa697964d44e7205145a658b8fe2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:12.392599 1196445 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:08:12.392727 1196445 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory
	I0704 01:08:12.392756 1196445 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory, skipping pull
	I0704 01:08:12.392765 1196445 image.go:105] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in cache, skipping pull
	I0704 01:08:12.392773 1196445 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 as a tarball
	I0704 01:08:12.392779 1196445 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from local cache
	I0704 01:08:28.987230 1196445 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from cached tarball
	I0704 01:08:28.987269 1196445 cache.go:194] Successfully downloaded all kic artifacts
	I0704 01:08:28.987306 1196445 start.go:360] acquireMachinesLock for addons-155517: {Name:mk8b1bd096582ae2ddeb51ce97c96e8bd6c10c03 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0704 01:08:28.987436 1196445 start.go:364] duration metric: took 108.035µs to acquireMachinesLock for "addons-155517"
	I0704 01:08:28.987467 1196445 start.go:93] Provisioning new machine with config: &{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:08:28.987567 1196445 start.go:125] createHost starting for "" (driver="docker")
	I0704 01:08:28.990219 1196445 out.go:204] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0704 01:08:28.990467 1196445 start.go:159] libmachine.API.Create for "addons-155517" (driver="docker")
	I0704 01:08:28.990494 1196445 client.go:168] LocalClient.Create starting
	I0704 01:08:28.990609 1196445 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem
	I0704 01:08:29.608241 1196445 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem
	I0704 01:08:29.736436 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0704 01:08:29.752779 1196445 cli_runner.go:211] docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0704 01:08:29.752887 1196445 network_create.go:284] running [docker network inspect addons-155517] to gather additional debugging logs...
	I0704 01:08:29.752909 1196445 cli_runner.go:164] Run: docker network inspect addons-155517
	W0704 01:08:29.768706 1196445 cli_runner.go:211] docker network inspect addons-155517 returned with exit code 1
	I0704 01:08:29.768736 1196445 network_create.go:287] error running [docker network inspect addons-155517]: docker network inspect addons-155517: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-155517 not found
	I0704 01:08:29.768749 1196445 network_create.go:289] output of [docker network inspect addons-155517]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-155517 not found
	
	** /stderr **
	I0704 01:08:29.768847 1196445 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:29.784069 1196445 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001c8c290}
	I0704 01:08:29.784107 1196445 network_create.go:124] attempt to create docker network addons-155517 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0704 01:08:29.784173 1196445 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-155517 addons-155517
	I0704 01:08:29.849610 1196445 network_create.go:108] docker network addons-155517 192.168.49.0/24 created
	I0704 01:08:29.849640 1196445 kic.go:121] calculated static IP "192.168.49.2" for the "addons-155517" container
	I0704 01:08:29.849732 1196445 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0704 01:08:29.864369 1196445 cli_runner.go:164] Run: docker volume create addons-155517 --label name.minikube.sigs.k8s.io=addons-155517 --label created_by.minikube.sigs.k8s.io=true
	I0704 01:08:29.881618 1196445 oci.go:103] Successfully created a docker volume addons-155517
	I0704 01:08:29.881710 1196445 cli_runner.go:164] Run: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib
	I0704 01:08:31.824188 1196445 cli_runner.go:217] Completed: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib: (1.942438119s)
	I0704 01:08:31.824231 1196445 oci.go:107] Successfully prepared a docker volume addons-155517
	I0704 01:08:31.824258 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:31.824278 1196445 kic.go:194] Starting extracting preloaded images to volume ...
	I0704 01:08:31.824367 1196445 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir
	I0704 01:08:36.228874 1196445 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir: (4.404462021s)
	I0704 01:08:36.228908 1196445 kic.go:203] duration metric: took 4.404626194s to extract preloaded images to volume ...
	W0704 01:08:36.229067 1196445 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0704 01:08:36.229179 1196445 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0704 01:08:36.288715 1196445 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-155517 --name addons-155517 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-155517 --network addons-155517 --ip 192.168.49.2 --volume addons-155517:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1
	I0704 01:08:36.596197 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Running}}
	I0704 01:08:36.616187 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:36.634979 1196445 cli_runner.go:164] Run: docker exec addons-155517 stat /var/lib/dpkg/alternatives/iptables
	I0704 01:08:36.703846 1196445 oci.go:144] the created container "addons-155517" has a running status.
	I0704 01:08:36.703874 1196445 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa...
	I0704 01:08:37.190789 1196445 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0704 01:08:37.210820 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.232784 1196445 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0704 01:08:37.232803 1196445 kic_runner.go:114] Args: [docker exec --privileged addons-155517 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0704 01:08:37.300309 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.323611 1196445 machine.go:94] provisionDockerMachine start ...
	I0704 01:08:37.323700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.348214 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.348477 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.348487 1196445 main.go:141] libmachine: About to run SSH command:
	hostname
	I0704 01:08:37.530824 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.530887 1196445 ubuntu.go:169] provisioning hostname "addons-155517"
	I0704 01:08:37.530986 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.553591 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.553832 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.553843 1196445 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-155517 && echo "addons-155517" | sudo tee /etc/hostname
	I0704 01:08:37.714152 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.714277 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.740011 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.740256 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.740273 1196445 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-155517' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-155517/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-155517' | sudo tee -a /etc/hosts; 
				fi
			fi
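
	The shell fragment above only touches /etc/hosts when no line already ends with the hostname: it rewrites an existing `127.0.1.1` entry in place, otherwise appends a new one. A rough Go equivalent of that branch logic (simplified: it substring-matches the hostname rather than reproducing the exact `grep -xq` whole-line anchors):

	package main

	import (
		"fmt"
		"strings"
	)

	// patchHosts mirrors the shell above: ensure /etc/hosts maps 127.0.1.1 to
	// the machine hostname, rewriting an existing 127.0.1.1 entry or appending one.
	func patchHosts(hosts, hostname string) string {
		if strings.Contains(hosts, hostname) {
			return hosts // the grep -xq guard: entry already present
		}
		lines := strings.Split(hosts, "\n")
		for i, l := range lines {
			if strings.HasPrefix(l, "127.0.1.1") {
				lines[i] = "127.0.1.1 " + hostname // the sed branch
				return strings.Join(lines, "\n")
			}
		}
		return hosts + "\n127.0.1.1 " + hostname // the tee -a branch
	}

	func main() {
		fmt.Println(patchHosts("127.0.0.1 localhost", "addons-155517"))
	}
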
	I0704 01:08:37.884114 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0704 01:08:37.884141 1196445 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18859-1190282/.minikube CaCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18859-1190282/.minikube}
	I0704 01:08:37.884170 1196445 ubuntu.go:177] setting up certificates
	I0704 01:08:37.884179 1196445 provision.go:84] configureAuth start
	I0704 01:08:37.884270 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:37.903878 1196445 provision.go:143] copyHostCerts
	I0704 01:08:37.903989 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem (1078 bytes)
	I0704 01:08:37.904149 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem (1123 bytes)
	I0704 01:08:37.904212 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem (1675 bytes)
	I0704 01:08:37.904262 1196445 provision.go:117] generating server cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem org=jenkins.addons-155517 san=[127.0.0.1 192.168.49.2 addons-155517 localhost minikube]
	I0704 01:08:38.603884 1196445 provision.go:177] copyRemoteCerts
	I0704 01:08:38.603955 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0704 01:08:38.604000 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.623978 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.720166 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0704 01:08:38.743354 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0704 01:08:38.766785 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0704 01:08:38.789642 1196445 provision.go:87] duration metric: took 905.449187ms to configureAuth
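
	configureAuth copies the host CA material and issues a server certificate whose SANs match the log line above (127.0.0.1, 192.168.49.2, addons-155517, localhost, minikube). A self-contained sketch of issuing such a certificate with Go's crypto/x509; unlike minikube, which signs the server cert with its CA key, this one is self-signed for brevity:

	package main

	import (
		"crypto/rand"
		"crypto/rsa"
		"crypto/x509"
		"crypto/x509/pkix"
		"encoding/pem"
		"math/big"
		"net"
		"os"
		"time"
	)

	func main() {
		// Illustrative only: a server cert with the same SANs the log reports.
		key, err := rsa.GenerateKey(rand.Reader, 2048)
		if err != nil {
			panic(err)
		}
		tmpl := &x509.Certificate{
			SerialNumber: big.NewInt(1),
			Subject:      pkix.Name{Organization: []string{"jenkins.addons-155517"}},
			NotBefore:    time.Now(),
			NotAfter:     time.Now().Add(24 * time.Hour),
			KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
			ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
			DNSNames:     []string{"addons-155517", "localhost", "minikube"},
			IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.49.2")},
		}
		// Self-signed (template is its own parent); minikube signs with minikubeCA.
		der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
		if err != nil {
			panic(err)
		}
		pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
	}
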
	I0704 01:08:38.789672 1196445 ubuntu.go:193] setting minikube options for container-runtime
	I0704 01:08:38.789856 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:08:38.789871 1196445 machine.go:97] duration metric: took 1.466242644s to provisionDockerMachine
	I0704 01:08:38.789879 1196445 client.go:171] duration metric: took 9.799379567s to LocalClient.Create
	I0704 01:08:38.789892 1196445 start.go:167] duration metric: took 9.799425482s to libmachine.API.Create "addons-155517"
	I0704 01:08:38.789902 1196445 start.go:293] postStartSetup for "addons-155517" (driver="docker")
	I0704 01:08:38.789912 1196445 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0704 01:08:38.789968 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0704 01:08:38.790010 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.806758 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.904686 1196445 ssh_runner.go:195] Run: cat /etc/os-release
	I0704 01:08:38.908018 1196445 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0704 01:08:38.908055 1196445 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0704 01:08:38.908066 1196445 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0704 01:08:38.908082 1196445 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0704 01:08:38.908093 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/addons for local assets ...
	I0704 01:08:38.908162 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/files for local assets ...
	I0704 01:08:38.908190 1196445 start.go:296] duration metric: took 118.282456ms for postStartSetup
	I0704 01:08:38.908511 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:38.923846 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:38.924136 1196445 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:08:38.924185 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.940968 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.036923 1196445 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0704 01:08:39.042092 1196445 start.go:128] duration metric: took 10.054507303s to createHost
	I0704 01:08:39.042119 1196445 start.go:83] releasing machines lock for "addons-155517", held for 10.054668801s
	I0704 01:08:39.042203 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:39.059237 1196445 ssh_runner.go:195] Run: cat /version.json
	I0704 01:08:39.059305 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.059637 1196445 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0704 01:08:39.059700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.080747 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.097096 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.175033 1196445 ssh_runner.go:195] Run: systemctl --version
	I0704 01:08:39.348341 1196445 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0704 01:08:39.352559 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0704 01:08:39.378748 1196445 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0704 01:08:39.378866 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0704 01:08:39.409039 1196445 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0704 01:08:39.409077 1196445 start.go:495] detecting cgroup driver to use...
	I0704 01:08:39.409110 1196445 detect.go:196] detected "cgroupfs" cgroup driver on host os
	I0704 01:08:39.409164 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0704 01:08:39.422321 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0704 01:08:39.434248 1196445 docker.go:217] disabling cri-docker service (if available) ...
	I0704 01:08:39.434344 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0704 01:08:39.448530 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0704 01:08:39.463578 1196445 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0704 01:08:39.552252 1196445 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0704 01:08:39.641041 1196445 docker.go:233] disabling docker service ...
	I0704 01:08:39.641108 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0704 01:08:39.660807 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0704 01:08:39.673868 1196445 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0704 01:08:39.763520 1196445 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0704 01:08:39.851995 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0704 01:08:39.863472 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0704 01:08:39.880705 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.9"|' /etc/containerd/config.toml"
	I0704 01:08:39.890751 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0704 01:08:39.901368 1196445 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0704 01:08:39.901467 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0704 01:08:39.911715 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.921830 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0704 01:08:39.931688 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.941303 1196445 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0704 01:08:39.950585 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0704 01:08:39.960278 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0704 01:08:39.972286 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0704 01:08:39.982106 1196445 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0704 01:08:39.991352 1196445 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0704 01:08:39.999917 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.099343 1196445 ssh_runner.go:195] Run: sudo systemctl restart containerd
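
	The sed edits above rewrite /etc/containerd/config.toml so that containerd runs with `SystemdCgroup = false`, matching the cgroupfs driver detected on the host, before the daemon is restarted. A small Go sketch of that substitution using the standard regexp package (illustrative; minikube performs it via the sed commands shown):

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		// Stand-in for the relevant config.toml fragment.
		conf := `[plugins."io.containerd.grpc.v1.cri".containerd.runtimes.runc.options]
	  SystemdCgroup = true`
		// Same effect as: sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g'
		re := regexp.MustCompile(`(?m)^(\s*)SystemdCgroup = .*$`)
		fmt.Println(re.ReplaceAllString(conf, "${1}SystemdCgroup = false"))
	}
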
	I0704 01:08:40.243998 1196445 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0704 01:08:40.244098 1196445 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0704 01:08:40.247881 1196445 start.go:563] Will wait 60s for crictl version
	I0704 01:08:40.247950 1196445 ssh_runner.go:195] Run: which crictl
	I0704 01:08:40.251327 1196445 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0704 01:08:40.296521 1196445 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.18
	RuntimeApiVersion:  v1
	I0704 01:08:40.296607 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.318696 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.343842 1196445 out.go:177] * Preparing Kubernetes v1.30.2 on containerd 1.7.18 ...
	I0704 01:08:40.345454 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:40.361140 1196445 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0704 01:08:40.364944 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.376129 1196445 kubeadm.go:877] updating cluster {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0704 01:08:40.376264 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:40.376340 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.412122 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.412146 1196445 containerd.go:534] Images already preloaded, skipping extraction
	I0704 01:08:40.412211 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.448869 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.448892 1196445 cache_images.go:84] Images are preloaded, skipping loading
	I0704 01:08:40.448900 1196445 kubeadm.go:928] updating node { 192.168.49.2 8443 v1.30.2 containerd true true} ...
	I0704 01:08:40.449001 1196445 kubeadm.go:940] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.30.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-155517 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0704 01:08:40.449071 1196445 ssh_runner.go:195] Run: sudo crictl info
	I0704 01:08:40.489171 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:40.489196 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:40.489206 1196445 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0704 01:08:40.489228 1196445 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.30.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-155517 NodeName:addons-155517 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0704 01:08:40.489399 1196445 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-155517"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.30.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
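
	The generated kubeadm.yaml above is a multi-document YAML stream (InitConfiguration, ClusterConfiguration, KubeletConfiguration and KubeProxyConfiguration). A sketch of how one might split and sanity-check it with gopkg.in/yaml.v3, e.g. confirming the kubelet's cgroupDriver agrees with the containerd setting written earlier (the embedded YAML here is a truncated stand-in for the real config shown in the log):

	package main

	import (
		"fmt"
		"strings"

		"gopkg.in/yaml.v3"
	)

	// kubeadmYAML would hold the generated multi-document config from the log.
	var kubeadmYAML = `apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	cgroupDriver: cgroupfs`

	func main() {
		// Split the stream on document separators and decode each document.
		for _, doc := range strings.Split(kubeadmYAML, "\n---\n") {
			var m map[string]interface{}
			if err := yaml.Unmarshal([]byte(doc), &m); err != nil {
				continue
			}
			if m["kind"] == "KubeletConfiguration" {
				fmt.Println("kubelet cgroupDriver:", m["cgroupDriver"]) // expect cgroupfs
			}
		}
	}
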
	
	I0704 01:08:40.489469 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.30.2
	I0704 01:08:40.498054 1196445 binaries.go:44] Found k8s binaries, skipping transfer
	I0704 01:08:40.498164 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0704 01:08:40.506560 1196445 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0704 01:08:40.524624 1196445 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0704 01:08:40.542714 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0704 01:08:40.560914 1196445 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0704 01:08:40.564512 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.575356 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.663366 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:08:40.678704 1196445 certs.go:68] Setting up /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517 for IP: 192.168.49.2
	I0704 01:08:40.678774 1196445 certs.go:194] generating shared ca certs ...
	I0704 01:08:40.678804 1196445 certs.go:226] acquiring lock for ca certs: {Name:mk4f0dbc18506f7ee4fcbc10f124348dd208ffc0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:40.678969 1196445 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key
	I0704 01:08:41.197423 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt ...
	I0704 01:08:41.197454 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt: {Name:mkb28c983e13ee826bf585de68c8dd48b64194c2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197647 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key ...
	I0704 01:08:41.197660 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key: {Name:mk0f96934eb2f8ea5b78e7bab1383e47ca4c47bf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197743 1196445 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key
	I0704 01:08:41.716569 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt ...
	I0704 01:08:41.716608 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt: {Name:mk49decf76e004afe576981c44baf46e246e42aa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.716807 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key ...
	I0704 01:08:41.716820 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key: {Name:mkfa598677724bedcdda29a6fc68fc0dff6ee016 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.717561 1196445 certs.go:256] generating profile certs ...
	I0704 01:08:41.717627 1196445 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key
	I0704 01:08:41.717645 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt with IP's: []
	I0704 01:08:42.174023 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt ...
	I0704 01:08:42.174061 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: {Name:mkc35913a7eb1db2825718f6dc2b65e7745aa5c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174290 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key ...
	I0704 01:08:42.174304 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key: {Name:mkfadadfe13d73634e029f31163c920115daacae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174396 1196445 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5
	I0704 01:08:42.174417 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0704 01:08:42.578819 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 ...
	I0704 01:08:42.578850 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5: {Name:mk7ecc68d7a90c858109ff8e83b26bc005452b6f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.579635 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 ...
	I0704 01:08:42.579664 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5: {Name:mk9daa218eca8496a54e034db67849e3cbe7a05c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.580299 1196445 certs.go:381] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt
	I0704 01:08:42.580398 1196445 certs.go:385] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key
	I0704 01:08:42.580459 1196445 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key
	I0704 01:08:42.580484 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt with IP's: []
	I0704 01:08:42.984339 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt ...
	I0704 01:08:42.984371 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt: {Name:mk9654e604aa4538cb254b142d3cacdd3534a634 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984566 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key ...
	I0704 01:08:42.984584 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key: {Name:mkcae6cd9331f55e5fdb956fdd3942c8d213675d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984770 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem (1679 bytes)
	I0704 01:08:42.984813 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem (1078 bytes)
	I0704 01:08:42.984842 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem (1123 bytes)
	I0704 01:08:42.984870 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem (1675 bytes)
	I0704 01:08:42.985445 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0704 01:08:43.011174 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1671 bytes)
	I0704 01:08:43.035588 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0704 01:08:43.061964 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0704 01:08:43.086326 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0704 01:08:43.110608 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0704 01:08:43.134625 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0704 01:08:43.157943 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0704 01:08:43.181717 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0704 01:08:43.206975 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0704 01:08:43.224889 1196445 ssh_runner.go:195] Run: openssl version
	I0704 01:08:43.230415 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0704 01:08:43.239834 1196445 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243271 1196445 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Jul  4 01:08 /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243394 1196445 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.250079 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
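
	The openssl/ln steps above compute the certificate's OpenSSL subject hash and link the CA under `<hash>.0` in /etc/ssl/certs, the filename scheme OpenSSL's certificate-directory lookup expects (here the hash is b5213941). A Go sketch of the same step, assuming the openssl binary is available and using the paths from the log:

	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"strings"
	)

	func main() {
		// `openssl x509 -hash -noout` prints the subject hash used as the
		// link name in the OpenSSL cert directory.
		out, err := exec.Command("openssl", "x509", "-hash", "-noout",
			"-in", "/usr/share/ca-certificates/minikubeCA.pem").Output()
		if err != nil {
			panic(err)
		}
		link := "/etc/ssl/certs/" + strings.TrimSpace(string(out)) + ".0"
		if err := os.Symlink("/etc/ssl/certs/minikubeCA.pem", link); err != nil && !os.IsExist(err) {
			panic(err)
		}
		fmt.Println("linked", link)
	}
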
	I0704 01:08:43.259384 1196445 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0704 01:08:43.262831 1196445 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0704 01:08:43.262919 1196445 kubeadm.go:391] StartCluster: {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:43.263009 1196445 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0704 01:08:43.263066 1196445 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0704 01:08:43.303591 1196445 cri.go:89] found id: ""
	I0704 01:08:43.303662 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0704 01:08:43.312342 1196445 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0704 01:08:43.321279 1196445 kubeadm.go:213] ignoring SystemVerification for kubeadm because of docker driver
	I0704 01:08:43.321369 1196445 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0704 01:08:43.329958 1196445 kubeadm.go:154] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0704 01:08:43.330017 1196445 kubeadm.go:156] found existing configuration files:
	
	I0704 01:08:43.330077 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0704 01:08:43.338479 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0704 01:08:43.338539 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0704 01:08:43.346581 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0704 01:08:43.355511 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0704 01:08:43.355614 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0704 01:08:43.363887 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.372345 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0704 01:08:43.372436 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.380824 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0704 01:08:43.389491 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0704 01:08:43.389581 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0704 01:08:43.398028 1196445 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.30.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0704 01:08:43.442669 1196445 kubeadm.go:309] [init] Using Kubernetes version: v1.30.2
	I0704 01:08:43.442729 1196445 kubeadm.go:309] [preflight] Running pre-flight checks
	I0704 01:08:43.482986 1196445 kubeadm.go:309] [preflight] The system verification failed. Printing the output from the verification:
	I0704 01:08:43.483062 1196445 kubeadm.go:309] KERNEL_VERSION: 5.15.0-1064-aws
	I0704 01:08:43.483101 1196445 kubeadm.go:309] OS: Linux
	I0704 01:08:43.483149 1196445 kubeadm.go:309] CGROUPS_CPU: enabled
	I0704 01:08:43.483199 1196445 kubeadm.go:309] CGROUPS_CPUACCT: enabled
	I0704 01:08:43.483248 1196445 kubeadm.go:309] CGROUPS_CPUSET: enabled
	I0704 01:08:43.483314 1196445 kubeadm.go:309] CGROUPS_DEVICES: enabled
	I0704 01:08:43.483364 1196445 kubeadm.go:309] CGROUPS_FREEZER: enabled
	I0704 01:08:43.483415 1196445 kubeadm.go:309] CGROUPS_MEMORY: enabled
	I0704 01:08:43.483465 1196445 kubeadm.go:309] CGROUPS_PIDS: enabled
	I0704 01:08:43.483531 1196445 kubeadm.go:309] CGROUPS_HUGETLB: enabled
	I0704 01:08:43.483582 1196445 kubeadm.go:309] CGROUPS_BLKIO: enabled
	I0704 01:08:43.557602 1196445 kubeadm.go:309] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0704 01:08:43.557712 1196445 kubeadm.go:309] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0704 01:08:43.557806 1196445 kubeadm.go:309] [preflight] You can also perform this action in beforehand using 'kubeadm config images pull'
	I0704 01:08:43.820944 1196445 kubeadm.go:309] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0704 01:08:43.824525 1196445 out.go:204]   - Generating certificates and keys ...
	I0704 01:08:43.824630 1196445 kubeadm.go:309] [certs] Using existing ca certificate authority
	I0704 01:08:43.824724 1196445 kubeadm.go:309] [certs] Using existing apiserver certificate and key on disk
	I0704 01:08:44.229464 1196445 kubeadm.go:309] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0704 01:08:44.869561 1196445 kubeadm.go:309] [certs] Generating "front-proxy-ca" certificate and key
	I0704 01:08:44.995121 1196445 kubeadm.go:309] [certs] Generating "front-proxy-client" certificate and key
	I0704 01:08:45.945765 1196445 kubeadm.go:309] [certs] Generating "etcd/ca" certificate and key
	I0704 01:08:46.426406 1196445 kubeadm.go:309] [certs] Generating "etcd/server" certificate and key
	I0704 01:08:46.426727 1196445 kubeadm.go:309] [certs] etcd/server serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.568753 1196445 kubeadm.go:309] [certs] Generating "etcd/peer" certificate and key
	I0704 01:08:46.569065 1196445 kubeadm.go:309] [certs] etcd/peer serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.824637 1196445 kubeadm.go:309] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0704 01:08:47.178188 1196445 kubeadm.go:309] [certs] Generating "apiserver-etcd-client" certificate and key
	I0704 01:08:47.938770 1196445 kubeadm.go:309] [certs] Generating "sa" key and public key
	I0704 01:08:47.939012 1196445 kubeadm.go:309] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0704 01:08:48.532992 1196445 kubeadm.go:309] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0704 01:08:48.700658 1196445 kubeadm.go:309] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0704 01:08:49.343140 1196445 kubeadm.go:309] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0704 01:08:50.301229 1196445 kubeadm.go:309] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0704 01:08:50.970394 1196445 kubeadm.go:309] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0704 01:08:50.971331 1196445 kubeadm.go:309] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0704 01:08:50.976216 1196445 kubeadm.go:309] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0704 01:08:50.978409 1196445 out.go:204]   - Booting up control plane ...
	I0704 01:08:50.978513 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0704 01:08:50.978594 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0704 01:08:50.979204 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0704 01:08:50.989631 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0704 01:08:50.991151 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0704 01:08:50.991199 1196445 kubeadm.go:309] [kubelet-start] Starting the kubelet
	I0704 01:08:51.101564 1196445 kubeadm.go:309] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0704 01:08:51.101665 1196445 kubeadm.go:309] [kubelet-check] Waiting for a healthy kubelet. This can take up to 4m0s
	I0704 01:08:52.602456 1196445 kubeadm.go:309] [kubelet-check] The kubelet is healthy after 1.501248538s
	I0704 01:08:52.602548 1196445 kubeadm.go:309] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0704 01:08:59.104335 1196445 kubeadm.go:309] [api-check] The API server is healthy after 6.50185707s
	I0704 01:08:59.129403 1196445 kubeadm.go:309] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0704 01:08:59.143665 1196445 kubeadm.go:309] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0704 01:08:59.169656 1196445 kubeadm.go:309] [upload-certs] Skipping phase. Please see --upload-certs
	I0704 01:08:59.169845 1196445 kubeadm.go:309] [mark-control-plane] Marking the node addons-155517 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0704 01:08:59.181202 1196445 kubeadm.go:309] [bootstrap-token] Using token: 7nkvt2.ozp2nts9dnthdvog
	I0704 01:08:59.182924 1196445 out.go:204]   - Configuring RBAC rules ...
	I0704 01:08:59.183052 1196445 kubeadm.go:309] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0704 01:08:59.189973 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0704 01:08:59.198030 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0704 01:08:59.202188 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0704 01:08:59.205935 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0704 01:08:59.209870 1196445 kubeadm.go:309] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0704 01:08:59.517980 1196445 kubeadm.go:309] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0704 01:08:59.966077 1196445 kubeadm.go:309] [addons] Applied essential addon: CoreDNS
	I0704 01:09:00.515905 1196445 kubeadm.go:309] [addons] Applied essential addon: kube-proxy
	I0704 01:09:00.516982 1196445 kubeadm.go:309] 
	I0704 01:09:00.517083 1196445 kubeadm.go:309] Your Kubernetes control-plane has initialized successfully!
	I0704 01:09:00.517098 1196445 kubeadm.go:309] 
	I0704 01:09:00.517185 1196445 kubeadm.go:309] To start using your cluster, you need to run the following as a regular user:
	I0704 01:09:00.517190 1196445 kubeadm.go:309] 
	I0704 01:09:00.517215 1196445 kubeadm.go:309]   mkdir -p $HOME/.kube
	I0704 01:09:00.517272 1196445 kubeadm.go:309]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0704 01:09:00.517328 1196445 kubeadm.go:309]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0704 01:09:00.517334 1196445 kubeadm.go:309] 
	I0704 01:09:00.517385 1196445 kubeadm.go:309] Alternatively, if you are the root user, you can run:
	I0704 01:09:00.517390 1196445 kubeadm.go:309] 
	I0704 01:09:00.517436 1196445 kubeadm.go:309]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0704 01:09:00.517442 1196445 kubeadm.go:309] 
	I0704 01:09:00.517492 1196445 kubeadm.go:309] You should now deploy a pod network to the cluster.
	I0704 01:09:00.517564 1196445 kubeadm.go:309] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0704 01:09:00.517632 1196445 kubeadm.go:309]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0704 01:09:00.517637 1196445 kubeadm.go:309] 
	I0704 01:09:00.517719 1196445 kubeadm.go:309] You can now join any number of control-plane nodes by copying certificate authorities
	I0704 01:09:00.517792 1196445 kubeadm.go:309] and service account keys on each node and then running the following as root:
	I0704 01:09:00.517797 1196445 kubeadm.go:309] 
	I0704 01:09:00.517884 1196445 kubeadm.go:309]   kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.517985 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa \
	I0704 01:09:00.518011 1196445 kubeadm.go:309] 	--control-plane 
	I0704 01:09:00.518017 1196445 kubeadm.go:309] 
	I0704 01:09:00.518099 1196445 kubeadm.go:309] Then you can join any number of worker nodes by running the following on each as root:
	I0704 01:09:00.518104 1196445 kubeadm.go:309] 
	I0704 01:09:00.518183 1196445 kubeadm.go:309] kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.518281 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa 
	I0704 01:09:00.520643 1196445 kubeadm.go:309] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1064-aws\n", err: exit status 1
	I0704 01:09:00.520759 1196445 kubeadm.go:309] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0704 01:09:00.520778 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:09:00.520793 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:09:00.523121 1196445 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0704 01:09:00.525213 1196445 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0704 01:09:00.530132 1196445 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.30.2/kubectl ...
	I0704 01:09:00.530152 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2438 bytes)
	I0704 01:09:00.549314 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0704 01:09:00.814623 1196445 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0704 01:09:00.814727 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:00.814782 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-155517 minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700 minikube.k8s.io/version=v1.33.1 minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8 minikube.k8s.io/name=addons-155517 minikube.k8s.io/primary=true
	I0704 01:09:00.972427 1196445 ops.go:34] apiserver oom_adj: -16
	I0704 01:09:00.972641 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.472756 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.972695 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.472885 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.973558 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.473435 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.973670 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.473303 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.973640 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.473357 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.973493 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.473432 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.972686 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.472874 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.973677 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.472770 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.973060 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.473021 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.972794 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.472869 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.973078 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.473672 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.972769 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.473575 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.972752 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.473123 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.973407 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.472957 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.571895 1196445 kubeadm.go:1107] duration metric: took 13.757267057s to wait for elevateKubeSystemPrivileges
	W0704 01:09:14.571932 1196445 kubeadm.go:286] apiserver tunnel failed: apiserver port not set
	I0704 01:09:14.571940 1196445 kubeadm.go:393] duration metric: took 31.309027137s to StartCluster
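The run of identical kubectl calls above is minikube polling for the default service account until kubeadm finishes elevating kube-system privileges; the 13.7s duration metric summarizes it. A minimal bash sketch of that pattern (command copied from the log; the 500ms interval is inferred from the timestamps):

	until sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default \
	      --kubeconfig=/var/lib/minikube/kubeconfig >/dev/null 2>&1; do
	  sleep 0.5   # retry until the default service account exists
	done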
	I0704 01:09:14.571956 1196445 settings.go:142] acquiring lock: {Name:mk6d49b718ddc65478a80e50434df6064c31eee4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572073 1196445 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:09:14.572478 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572667 1196445 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:09:14.572807 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0704 01:09:14.573070 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.573109 1196445 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
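The toEnable map above drives every "Setting addon ..." line that follows. To see the same enabled/disabled state from the CLI after the run (a hedged example, not taken from this log; profile name copied from it):

	minikube -p addons-155517 addons list   # per-addon enabled/disabled status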
	I0704 01:09:14.573210 1196445 addons.go:69] Setting yakd=true in profile "addons-155517"
	I0704 01:09:14.573236 1196445 addons.go:234] Setting addon yakd=true in "addons-155517"
	I0704 01:09:14.573263 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.573732 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574149 1196445 addons.go:69] Setting metrics-server=true in profile "addons-155517"
	I0704 01:09:14.574180 1196445 addons.go:234] Setting addon metrics-server=true in "addons-155517"
	I0704 01:09:14.574206 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574253 1196445 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-155517"
	I0704 01:09:14.574275 1196445 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-155517"
	I0704 01:09:14.574297 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574634 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574730 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.577258 1196445 addons.go:69] Setting registry=true in profile "addons-155517"
	I0704 01:09:14.577394 1196445 addons.go:234] Setting addon registry=true in "addons-155517"
	I0704 01:09:14.577449 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.579535 1196445 addons.go:69] Setting storage-provisioner=true in profile "addons-155517"
	I0704 01:09:14.579602 1196445 addons.go:234] Setting addon storage-provisioner=true in "addons-155517"
	I0704 01:09:14.579640 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.580179 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580252 1196445 addons.go:69] Setting cloud-spanner=true in profile "addons-155517"
	I0704 01:09:14.580300 1196445 addons.go:234] Setting addon cloud-spanner=true in "addons-155517"
	I0704 01:09:14.591703 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.592237 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580425 1196445 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-155517"
	I0704 01:09:14.597313 1196445 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:14.597353 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.597793 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580434 1196445 addons.go:69] Setting default-storageclass=true in profile "addons-155517"
	I0704 01:09:14.598119 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-155517"
	I0704 01:09:14.598447 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580438 1196445 addons.go:69] Setting gcp-auth=true in profile "addons-155517"
	I0704 01:09:14.618488 1196445 mustload.go:65] Loading cluster: addons-155517
	I0704 01:09:14.618673 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.618933 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580441 1196445 addons.go:69] Setting ingress=true in profile "addons-155517"
	I0704 01:09:14.633852 1196445 addons.go:234] Setting addon ingress=true in "addons-155517"
	I0704 01:09:14.633911 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.634367 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580444 1196445 addons.go:69] Setting ingress-dns=true in profile "addons-155517"
	I0704 01:09:14.659280 1196445 addons.go:234] Setting addon ingress-dns=true in "addons-155517"
	I0704 01:09:14.659332 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.659837 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580448 1196445 addons.go:69] Setting inspektor-gadget=true in profile "addons-155517"
	I0704 01:09:14.683730 1196445 addons.go:234] Setting addon inspektor-gadget=true in "addons-155517"
	I0704 01:09:14.683777 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.684227 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.698704 1196445 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.15.1
	I0704 01:09:14.701274 1196445 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.17
	I0704 01:09:14.580702 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.719181 1196445 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0704 01:09:14.580712 1196445 out.go:177] * Verifying Kubernetes components...
	I0704 01:09:14.721032 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0704 01:09:14.721051 1196445 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0704 01:09:14.721122 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
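Two docker templates do the per-addon plumbing in this section: `--format={{.State.Status}}` confirms the node container is running, and the NetworkSettings.Ports index resolves the host port mapped to the container's SSH port (22/tcp), which the ssh clients below connect to. Runnable forms of both (shell quoting normalized from the log's display):

	docker container inspect addons-155517 --format='{{.State.Status}}'
	docker container inspect addons-155517 \
	  -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'   # e.g. 33941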
	I0704 01:09:14.580911 1196445 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-155517"
	I0704 01:09:14.580918 1196445 addons.go:69] Setting volcano=true in profile "addons-155517"
	I0704 01:09:14.723014 1196445 addons.go:234] Setting addon volcano=true in "addons-155517"
	I0704 01:09:14.580922 1196445 addons.go:69] Setting volumesnapshots=true in profile "addons-155517"
	I0704 01:09:14.723103 1196445 addons.go:234] Setting addon volumesnapshots=true in "addons-155517"
	I0704 01:09:14.723131 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.723415 1196445 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:14.723429 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0704 01:09:14.723497 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.729712 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-155517"
	I0704 01:09:14.730053 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.760655 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:09:14.760879 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.761414 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.767033 1196445 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:14.767054 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0704 01:09:14.767114 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.808186 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.808661 1196445 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.1
	I0704 01:09:14.811351 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0704 01:09:14.811385 1196445 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0704 01:09:14.811464 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.828255 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0704 01:09:14.830324 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.854527 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.2
	I0704 01:09:14.854766 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:14.855575 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0704 01:09:14.855680 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.858854 1196445 addons.go:234] Setting addon default-storageclass=true in "addons-155517"
	I0704 01:09:14.858894 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.859317 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.876557 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.884711 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0704 01:09:14.887689 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:14.887713 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0704 01:09:14.887776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.907776 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.921547 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0704 01:09:14.946618 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.10.1
	I0704 01:09:14.952851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:14.958655 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:14.958677 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0704 01:09:14.958757 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.989105 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0704 01:09:14.998292 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0704 01:09:15.008457 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.010183 1196445 out.go:177]   - Using image docker.io/registry:2.8.3
	I0704 01:09:15.011903 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0704 01:09:15.016345 1196445 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-155517"
	I0704 01:09:15.016398 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:15.016856 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:15.017248 1196445 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.30.0
	I0704 01:09:15.026296 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0704 01:09:15.032293 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0704 01:09:15.033528 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.034311 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0704 01:09:15.036144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0704 01:09:15.036176 1196445 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0704 01:09:15.036273 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.040125 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0704 01:09:15.042262 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0704 01:09:15.042363 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0704 01:09:15.044439 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0704 01:09:15.044460 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0704 01:09:15.044547 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.052699 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0704 01:09:15.052768 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (798 bytes)
	I0704 01:09:15.052855 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.066354 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0704 01:09:15.068850 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0704 01:09:15.075588 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0704 01:09:15.075620 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0704 01:09:15.075727 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.088431 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0704 01:09:15.114822 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.115791 1196445 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:15.115824 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (442770 bytes)
	I0704 01:09:15.115909 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.121303 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.123349 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.160899 1196445 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:15.160920 1196445 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0704 01:09:15.160989 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.162700 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.243792 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.254669 1196445 out.go:177]   - Using image docker.io/busybox:stable
	I0704 01:09:15.256265 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257205 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257463 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.259664 1196445 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0704 01:09:15.261691 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.261712 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0704 01:09:15.261776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.265635 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	W0704 01:09:15.288056 1196445 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0704 01:09:15.288087 1196445 retry.go:31] will retry after 347.093048ms: ssh: handshake failed: EOF
	I0704 01:09:15.307720 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.310433 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.322864 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:09:15.588350 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0704 01:09:15.588385 1196445 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0704 01:09:15.621674 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0704 01:09:15.621748 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0704 01:09:15.749570 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0704 01:09:15.749601 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0704 01:09:15.934643 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.971372 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:16.022036 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.022108 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0704 01:09:16.110981 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0704 01:09:16.111009 1196445 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0704 01:09:16.121017 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0704 01:09:16.121043 1196445 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0704 01:09:16.190720 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:16.201668 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:16.213599 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:16.238689 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0704 01:09:16.238718 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0704 01:09:16.244065 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:16.263426 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:16.295999 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0704 01:09:16.296072 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0704 01:09:16.307564 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.362987 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:16.383918 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0704 01:09:16.383945 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0704 01:09:16.394076 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0704 01:09:16.394102 1196445 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0704 01:09:16.417925 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.417949 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0704 01:09:16.422065 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0704 01:09:16.422088 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0704 01:09:16.522801 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0704 01:09:16.522826 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0704 01:09:16.575145 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0704 01:09:16.575171 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0704 01:09:16.653193 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.653217 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0704 01:09:16.659744 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0704 01:09:16.659770 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0704 01:09:16.661739 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.744662 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0704 01:09:16.744688 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0704 01:09:16.821862 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0704 01:09:16.821888 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0704 01:09:16.881062 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.901455 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0704 01:09:16.901481 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0704 01:09:16.928474 1196445 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.939327122s)
	I0704 01:09:16.928503 1196445 start.go:967] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
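The 1.9s pipeline that just completed rewrites the coredns ConfigMap so in-cluster workloads can resolve the host. Reconstructed from the sed expressions (not captured from the cluster), the replaced Corefile gains a hosts stanza ahead of the existing forward rule:

	# verify with:
	kubectl -n kube-system get configmap coredns -o yaml
	# expected fragment inserted by the sed pipeline:
	#        hosts {
	#           192.168.49.1 host.minikube.internal
	#           fallthrough
	#        }
	#        forward . /etc/resolv.conf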
	I0704 01:09:16.929502 1196445 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.606615135s)
	I0704 01:09:16.930207 1196445 node_ready.go:35] waiting up to 6m0s for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937817 1196445 node_ready.go:49] node "addons-155517" has status "Ready":"True"
	I0704 01:09:16.937845 1196445 node_ready.go:38] duration metric: took 7.604392ms for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937855 1196445 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0704 01:09:16.962597 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
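Each pod_ready poll that follows is the programmatic equivalent of checking the pod's Ready condition. A manual spot-check (assumed usage; pod name and namespace copied from the log):

	kubectl -n kube-system get pod coredns-7db6d8ff4d-5x2l7 \
	  -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}'   # prints True once ready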
	I0704 01:09:17.098912 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0704 01:09:17.098985 1196445 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0704 01:09:17.131539 1196445 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.131609 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0704 01:09:17.201528 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0704 01:09:17.201552 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0704 01:09:17.281563 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0704 01:09:17.281585 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0704 01:09:17.383540 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0704 01:09:17.383612 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0704 01:09:17.431613 1196445 kapi.go:248] "coredns" deployment in "kube-system" namespace and "addons-155517" context rescaled to 1 replicas
	I0704 01:09:17.491254 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.630881 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0704 01:09:17.630958 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0704 01:09:17.673774 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0704 01:09:17.673845 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0704 01:09:17.747064 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0704 01:09:17.747141 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0704 01:09:17.825561 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0704 01:09:17.825630 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0704 01:09:17.839617 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:17.839687 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0704 01:09:18.021499 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0704 01:09:18.021581 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0704 01:09:18.066416 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:18.103144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.103227 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0704 01:09:18.206551 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.972681 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:19.481076 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (3.546393824s)
	I0704 01:09:19.481267 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (3.509817533s)
	I0704 01:09:19.707411 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.516628882s)
	I0704 01:09:21.018313 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:22.094516 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0704 01:09:22.094602 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.120851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:22.498250 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0704 01:09:22.672494 1196445 addons.go:234] Setting addon gcp-auth=true in "addons-155517"
	I0704 01:09:22.672553 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:22.672999 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:22.693321 1196445 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0704 01:09:22.693374 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.731686 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:23.219625 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (7.017917581s)
	I0704 01:09:23.219660 1196445 addons.go:475] Verifying addon ingress=true in "addons-155517"
	I0704 01:09:23.219801 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (7.006176358s)
	I0704 01:09:23.223048 1196445 out.go:177] * Verifying ingress addon...
	I0704 01:09:23.225816 1196445 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0704 01:09:23.233823 1196445 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0704 01:09:23.233848 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
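The kapi loop starting here re-checks the three controller pods every ~500ms until the selector reports Ready. The blocking equivalent with plain kubectl (a sketch; selector and namespace copied from the log, timeout assumed):

	kubectl -n ingress-nginx wait --for=condition=Ready pod \
	  -l app.kubernetes.io/name=ingress-nginx --timeout=6m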
	I0704 01:09:23.469592 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:23.733369 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.252870 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.738392 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.197716 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (8.95361222s)
	I0704 01:09:25.197838 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (8.934389448s)
	I0704 01:09:25.197945 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.890302325s)
	I0704 01:09:25.197981 1196445 addons.go:475] Verifying addon metrics-server=true in "addons-155517"
	I0704 01:09:25.198051 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (8.835024851s)
	I0704 01:09:25.198102 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.536339738s)
	I0704 01:09:25.198137 1196445 addons.go:475] Verifying addon registry=true in "addons-155517"
	I0704 01:09:25.198304 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.317214693s)
	I0704 01:09:25.198380 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.707056177s)
	W0704 01:09:25.199807 1196445 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0704 01:09:25.199830 1196445 retry.go:31] will retry after 346.398491ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0704 01:09:25.198438 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.131941092s)
	I0704 01:09:25.201793 1196445 out.go:177] * Verifying registry addon...
	I0704 01:09:25.201794 1196445 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-155517 service yakd-dashboard -n yakd-dashboard
	
	I0704 01:09:25.204942 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0704 01:09:25.222617 1196445 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0704 01:09:25.222690 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.251428 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.492382 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:25.546710 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
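The apply failure above is the usual CRD establishment race: the VolumeSnapshotClass objects land in the same batch as the CRDs that define their kind, so the API server has no resource mapping for them yet. The log's answer is simply to retry (here with --force); an alternative sketch (not what minikube does) is to apply the CRDs first and wait for them to be established before applying the custom resources:

	kubectl apply -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	kubectl wait --for condition=established --timeout=60s \
	  crd/volumesnapshotclasses.snapshot.storage.k8s.io
	kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml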
	I0704 01:09:25.735322 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.736772 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.911009 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.704355252s)
	I0704 01:09:25.911047 1196445 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:25.911278 1196445 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.217934924s)
	I0704 01:09:25.914361 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:25.914416 1196445 out.go:177] * Verifying csi-hostpath-driver addon...
	I0704 01:09:25.916249 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0704 01:09:25.917135 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0704 01:09:25.918627 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0704 01:09:25.918653 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0704 01:09:25.925376 1196445 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0704 01:09:25.925401 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:25.957453 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0704 01:09:25.957479 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0704 01:09:25.985833 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:25.985857 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0704 01:09:26.010928 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:26.210069 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.232731 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.424161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:26.710162 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.731168 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.923170 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.210427 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.230934 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.250410 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (1.703595614s)
	I0704 01:09:27.250521 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.23956747s)
	I0704 01:09:27.253393 1196445 addons.go:475] Verifying addon gcp-auth=true in "addons-155517"
	I0704 01:09:27.257553 1196445 out.go:177] * Verifying gcp-auth addon...
	I0704 01:09:27.260382 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0704 01:09:27.262998 1196445 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:09:27.424027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.710878 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.730370 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.923796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.969295 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:28.211580 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.231431 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.422656 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:28.711283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.730044 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.924689 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.211847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.231572 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.425562 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.710139 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.731027 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.210230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.231858 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.424018 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.469196 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:30.709718 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.730263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.922825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.219839 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.241048 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.425597 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.712545 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.731465 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.922640 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.210366 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.230954 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.422551 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.710061 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.730153 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.925535 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.968875 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:33.209869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.230720 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.423400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:33.710614 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.730154 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.923184 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.211208 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.230331 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.422808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.710269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.730201 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.923588 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.969882 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:35.210283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.230873 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.422763 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:35.709465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.730270 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.923232 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.209479 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.229850 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.423197 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.710129 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.730676 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.922710 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.209525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.230493 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.424298 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.468432 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:37.710143 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.730780 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.922415 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.209676 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.230719 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.422303 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.710124 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.730126 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.922826 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.210085 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.230519 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.423189 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.468806 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:39.710065 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.730167 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.923743 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.209304 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.422186 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.710013 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.730219 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.923123 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.211576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.237098 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.423541 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.469660 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:41.710859 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.731203 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.923394 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.210467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.231587 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.423307 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.709772 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.729958 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.923400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.210064 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.230380 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.423544 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.709869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.730334 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.925613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.976063 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:44.216521 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.233500 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.423984 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:44.709890 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.730491 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.922620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.218269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.232042 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.709814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.730412 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.924578 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.210332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.231072 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.424267 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.482456 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:46.709782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.730358 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.925109 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.210160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.230642 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.422979 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.710273 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.730748 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.937531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.210020 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.230451 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.423194 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.709898 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.732172 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.923290 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.969889 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:49.212283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.230721 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.423247 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:49.710414 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.730459 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.922796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.210560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.233100 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.423363 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.710514 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.733774 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.922714 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.211285 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.423332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.472262 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:51.710160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.730450 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.925154 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.231397 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.423658 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.710851 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.730326 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.212484 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.232166 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.424097 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.710891 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.731740 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.924019 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.968656 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:54.211161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.232059 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.423259 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:54.712488 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.731251 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.924094 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.210510 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.231539 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.422966 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.711791 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.733623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.930765 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.969815 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:56.210040 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.230481 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.423900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:56.717782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.733051 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.924352 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.210192 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.231623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.423375 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.712974 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.739221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.924312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.210188 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.231084 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.428301 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.469048 1196445 pod_ready.go:92] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.469072 1196445 pod_ready.go:81] duration metric: took 41.506375249s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.469083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.470959 1196445 pod_ready.go:97] error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
	I0704 01:09:58.470985 1196445 pod_ready.go:81] duration metric: took 1.892793ms for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	E0704 01:09:58.470996 1196445 pod_ready.go:66] WaitExtra: waitPodCondition: error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
	I0704 01:09:58.471003 1196445 pod_ready.go:78] waiting up to 6m0s for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476539 1196445 pod_ready.go:92] pod "etcd-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.476565 1196445 pod_ready.go:81] duration metric: took 5.554425ms for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476580 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482116 1196445 pod_ready.go:92] pod "kube-apiserver-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.482141 1196445 pod_ready.go:81] duration metric: took 5.552694ms for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482153 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488450 1196445 pod_ready.go:92] pod "kube-controller-manager-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.488476 1196445 pod_ready.go:81] duration metric: took 6.314983ms for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488488 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667045 1196445 pod_ready.go:92] pod "kube-proxy-62r6j" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.667072 1196445 pod_ready.go:81] duration metric: took 178.576255ms for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.709864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.730495 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.923133 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.067974 1196445 pod_ready.go:92] pod "kube-scheduler-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:59.068046 1196445 pod_ready.go:81] duration metric: took 400.954049ms for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:59.068072 1196445 pod_ready.go:38] duration metric: took 42.130204788s of extra waiting for all system-critical pods and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
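
The pod_ready.go lines above poll each named pod's Ready condition until it flips to True or the per-pod budget (6m0s here) runs out. Below is a minimal sketch of that kind of readiness poll using client-go; the kubeconfig path, poll interval, and pod name are assumptions taken from this log, not minikube's actual implementation.

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// podReady reports whether the pod's Ready condition is True.
	func podReady(c kubernetes.Interface, ns, name string) (bool, error) {
		pod, err := c.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return false, err
		}
		for _, cond := range pod.Status.Conditions {
			if cond.Type == corev1.PodReady {
				return cond.Status == corev1.ConditionTrue, nil
			}
		}
		return false, nil
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)
		deadline := time.Now().Add(6 * time.Minute) // mirrors the 6m0s budget in the log
		for time.Now().Before(deadline) {
			if ok, _ := podReady(client, "kube-system", "coredns-7db6d8ff4d-5x2l7"); ok {
				fmt.Println("pod is Ready")
				return
			}
			time.Sleep(2 * time.Second)
		}
		fmt.Println("timed out waiting for pod to be Ready")
	}
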
	I0704 01:09:59.068115 1196445 api_server.go:52] waiting for apiserver process to appear ...
	I0704 01:09:59.068215 1196445 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 01:09:59.083368 1196445 api_server.go:72] duration metric: took 44.510664696s to wait for apiserver process to appear ...
	I0704 01:09:59.083466 1196445 api_server.go:88] waiting for apiserver healthz status ...
	I0704 01:09:59.083524 1196445 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0704 01:09:59.092379 1196445 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0704 01:09:59.093665 1196445 api_server.go:141] control plane version: v1.30.2
	I0704 01:09:59.093689 1196445 api_server.go:131] duration metric: took 10.180723ms to wait for apiserver health ...
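
The healthz probe logged above is a plain HTTPS GET against the apiserver endpoint. A minimal sketch of such a probe follows; it assumes anonymous access to /healthz is allowed (the Kubernetes default) and skips certificate verification purely to keep the sketch short.

	package main

	import (
		"crypto/tls"
		"fmt"
		"io"
		"net/http"
		"time"
	)

	func main() {
		client := &http.Client{
			Timeout: 5 * time.Second,
			Transport: &http.Transport{
				// Sketch only: a real probe should trust the cluster CA instead.
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			},
		}
		resp, err := client.Get("https://192.168.49.2:8443/healthz")
		if err != nil {
			fmt.Println("healthz unreachable:", err)
			return
		}
		defer resp.Body.Close()
		body, _ := io.ReadAll(resp.Body)
		fmt.Printf("%d: %s\n", resp.StatusCode, body) // the log above shows 200 and "ok"
	}
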
	I0704 01:09:59.093697 1196445 system_pods.go:43] waiting for kube-system pods to appear ...
	I0704 01:09:59.210881 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.230670 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.277516 1196445 system_pods.go:59] 18 kube-system pods found
	I0704 01:09:59.277591 1196445 system_pods.go:61] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.277614 1196445 system_pods.go:61] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.277638 1196445 system_pods.go:61] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.277674 1196445 system_pods.go:61] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.277700 1196445 system_pods.go:61] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.277723 1196445 system_pods.go:61] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.277743 1196445 system_pods.go:61] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.277782 1196445 system_pods.go:61] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.277811 1196445 system_pods.go:61] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.277834 1196445 system_pods.go:61] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.277855 1196445 system_pods.go:61] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.277890 1196445 system_pods.go:61] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.277919 1196445 system_pods.go:61] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.277940 1196445 system_pods.go:61] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.277965 1196445 system_pods.go:61] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.278000 1196445 system_pods.go:61] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.278027 1196445 system_pods.go:61] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.278049 1196445 system_pods.go:61] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.278072 1196445 system_pods.go:74] duration metric: took 184.367763ms to wait for pod list to return data ...
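
The 18-pod inventory above comes from listing the kube-system namespace and printing each pod's phase along with any unready containers. A minimal sketch of that listing, under the same default-kubeconfig assumption as before:

	package main

	import (
		"context"
		"fmt"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)
		pods, err := client.CoreV1().Pods("kube-system").List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		fmt.Printf("%d kube-system pods found\n", len(pods.Items))
		for _, p := range pods.Items {
			// Phase can read Running while containers are still unready, which
			// is why the log also prints the Ready/ContainersReady conditions.
			fmt.Printf("%q [%s] %s\n", p.Name, p.UID, p.Status.Phase)
		}
	}
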
	I0704 01:09:59.278106 1196445 default_sa.go:34] waiting for default service account to be created ...
	I0704 01:09:59.423626 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.467163 1196445 default_sa.go:45] found service account: "default"
	I0704 01:09:59.467239 1196445 default_sa.go:55] duration metric: took 189.106033ms for default service account to be created ...
	I0704 01:09:59.467265 1196445 system_pods.go:116] waiting for k8s-apps to be running ...
	I0704 01:09:59.676387 1196445 system_pods.go:86] 18 kube-system pods found
	I0704 01:09:59.676469 1196445 system_pods.go:89] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.676493 1196445 system_pods.go:89] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.676519 1196445 system_pods.go:89] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.676561 1196445 system_pods.go:89] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.676582 1196445 system_pods.go:89] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.676604 1196445 system_pods.go:89] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.676642 1196445 system_pods.go:89] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.676667 1196445 system_pods.go:89] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.676692 1196445 system_pods.go:89] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.676714 1196445 system_pods.go:89] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.676748 1196445 system_pods.go:89] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.677464 1196445 system_pods.go:89] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.677489 1196445 system_pods.go:89] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.677516 1196445 system_pods.go:89] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.677555 1196445 system_pods.go:89] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.677573 1196445 system_pods.go:89] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.677594 1196445 system_pods.go:89] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.677626 1196445 system_pods.go:89] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.677655 1196445 system_pods.go:126] duration metric: took 210.368801ms to wait for k8s-apps to be running ...
	I0704 01:09:59.677675 1196445 system_svc.go:44] waiting for kubelet service to be running ...
	I0704 01:09:59.677762 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:09:59.696237 1196445 system_svc.go:56] duration metric: took 18.553501ms for WaitForService to wait for kubelet
	I0704 01:09:59.696315 1196445 kubeadm.go:576] duration metric: took 45.12361654s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
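
The kubelet check above is a systemd query executed on the node through ssh_runner. The sketch below shells out locally instead and queries the kubelet unit directly; both of those simplifications are assumptions.

	package main

	import (
		"fmt"
		"os/exec"
	)

	func main() {
		// `systemctl is-active --quiet kubelet` exits 0 iff the unit is active.
		if err := exec.Command("systemctl", "is-active", "--quiet", "kubelet").Run(); err != nil {
			fmt.Println("kubelet is not running:", err)
			return
		}
		fmt.Println("kubelet is running")
	}
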
	I0704 01:09:59.696350 1196445 node_conditions.go:102] verifying NodePressure condition ...
	I0704 01:09:59.714261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.736009 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.867078 1196445 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0704 01:09:59.867113 1196445 node_conditions.go:123] node cpu capacity is 2
	I0704 01:09:59.867127 1196445 node_conditions.go:105] duration metric: took 170.755162ms to run NodePressure ...
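
The NodePressure step reads the node's advertised capacity (the ephemeral-storage and cpu figures above) and its pressure conditions. A minimal sketch of reading those fields, again assuming the default kubeconfig:

	package main

	import (
		"context"
		"fmt"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)
		nodes, err := client.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name,
				n.Status.Capacity.Cpu().String(),
				n.Status.Capacity.StorageEphemeral().String())
			for _, cond := range n.Status.Conditions {
				// MemoryPressure, DiskPressure and PIDPressure should all be
				// False on a healthy node.
				fmt.Printf("  %s=%s\n", cond.Type, cond.Status)
			}
		}
	}
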
	I0704 01:09:59.867149 1196445 start.go:241] waiting for startup goroutines ...
	I0704 01:09:59.923389 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.218625 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.241517 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.438305 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.710808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.730534 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.926440 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.232796 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.429002 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.710041 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.730869 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.924722 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.210497 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.231671 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.438549 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.710555 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.735794 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.924538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.210613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.231512 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.424944 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.711675 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.733187 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.929527 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.209595 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.427465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.710195 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.731186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.924972 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.211604 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.231389 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.711411 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.734962 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.923586 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.210512 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.231004 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.423318 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.711560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.730836 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.938452 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.210046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.230933 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.425316 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.710091 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.730834 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.922615 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.211590 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.230434 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.423538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.710046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.733862 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.924256 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.209864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.230148 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.423756 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.710673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.731199 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.925117 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.212967 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.232598 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.424087 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.713769 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.730323 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.923900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.210573 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.231728 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.425825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.709221 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.730266 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.923382 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.209500 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.230865 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.422855 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.709768 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.731470 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.923005 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.209436 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.230328 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.422933 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.709576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.730924 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.922558 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.210379 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.230443 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.423308 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.710027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.730518 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.922814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.211531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.230606 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.423172 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.711224 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.730357 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.923854 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.214100 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.236362 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.422969 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.709735 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.730186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.924010 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.209861 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.230221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.429620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.711216 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.731288 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.923272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.210800 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.423721 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.709748 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.730911 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.924515 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.210261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.424525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.709993 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.732993 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.924083 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.210003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.230897 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.423785 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.711057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.730952 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.924157 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.210312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.230586 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.423795 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.710239 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.732286 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.926006 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.210368 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:22.230949 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.423210 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.709928 1196445 kapi.go:107] duration metric: took 57.504984289s to wait for kubernetes.io/minikube-addons=registry ...
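
The kapi.go:96 waits differ from the pod_ready.go waits: judging by the log, they list pods by label selector and keep polling while any match is still Pending, which is why the same three selectors repeat above until each addon's pods are scheduled. Below is a minimal sketch of a selector-based wait of that shape; the selector string is taken from the log, and everything else is illustrative.

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		client := kubernetes.NewForConfigOrDie(cfg)
		selector := "kubernetes.io/minikube-addons=registry"
		for {
			pods, err := client.CoreV1().Pods("kube-system").List(context.TODO(),
				metav1.ListOptions{LabelSelector: selector})
			if err != nil {
				panic(err)
			}
			pending := 0
			for _, p := range pods.Items {
				if p.Status.Phase == corev1.PodPending {
					pending++
				}
			}
			if len(pods.Items) > 0 && pending == 0 {
				fmt.Println("all pods matching", selector, "have left Pending")
				return
			}
			time.Sleep(500 * time.Millisecond) // the log polls at roughly this cadence
		}
	}
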
	I0704 01:10:22.731279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.923850 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.230884 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.423811 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.730105 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.922926 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.229930 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.423940 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.733279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.923057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.232439 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.423165 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.731393 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.924116 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.230888 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.423217 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.730398 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.922554 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.231244 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.427320 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.732045 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.924673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.423152 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.730530 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.922455 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.231220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.423963 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.731263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.923847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.234736 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.424146 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.730525 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.230545 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.422889 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.730356 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.922685 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.230220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.423403 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.731689 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.230844 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:33.424895 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.734207 1196445 kapi.go:107] duration metric: took 1m10.508389549s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0704 01:10:33.927331 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.424003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.924230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.422420 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.922720 1196445 kapi.go:107] duration metric: took 1m10.005581993s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0704 01:10:50.263958 1196445 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:10:50.263985 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:50.764249 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.264467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.764892 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.264523 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.764246 1196445 kapi.go:107] duration metric: took 1m25.503859259s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0704 01:10:52.766320 1196445 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-155517 cluster.
	I0704 01:10:52.767887 1196445 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0704 01:10:52.769442 1196445 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0704 01:10:52.771171 1196445 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner-rancher, storage-provisioner, default-storageclass, volcano, cloud-spanner, metrics-server, ingress-dns, inspektor-gadget, yakd, volumesnapshots, registry, ingress, csi-hostpath-driver, gcp-auth
	I0704 01:10:52.772773 1196445 addons.go:510] duration metric: took 1m38.199659863s for enable addons: enabled=[nvidia-device-plugin storage-provisioner-rancher storage-provisioner default-storageclass volcano cloud-spanner metrics-server ingress-dns inspektor-gadget yakd volumesnapshots registry ingress csi-hostpath-driver gcp-auth]
	I0704 01:10:52.772819 1196445 start.go:246] waiting for cluster config update ...
	I0704 01:10:52.772852 1196445 start.go:255] writing updated cluster config ...
	I0704 01:10:52.773191 1196445 ssh_runner.go:195] Run: rm -f paused
	I0704 01:10:53.108848 1196445 start.go:600] kubectl: 1.30.2, cluster: 1.30.2 (minor skew: 0)
	I0704 01:10:53.111322 1196445 out.go:177] * Done! kubectl is now configured to use "addons-155517" cluster and "default" namespace by default
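	
	A minimal sketch of opting a single pod out of the credential mount described above, using the `gcp-auth-skip-secret` label from the message; the pod name and image here are illustrative, not taken from this run:
	
	# Hypothetical pod that should not receive the mounted GCP credentials.
	kubectl --context addons-155517 apply -f - <<'EOF'
	apiVersion: v1
	kind: Pod
	metadata:
	  name: no-gcp-creds            # illustrative name
	  labels:
	    gcp-auth-skip-secret: "true"
	spec:
	  containers:
	  - name: app
	    image: nginx                # illustrative image
	EOF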
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	ed30151904663       dd1b12fcb6097       5 seconds ago       Exited              hello-world-app           2                   f656a264fb3c3       hello-world-app-86c47465fc-kcpm6
	044ce99825eb4       443d199e8bfcc       32 seconds ago      Running             nginx                     0                   6b6375f3d6290       test-job-nginx-0
	c46f76128f4b3       5461b18aaccf3       33 seconds ago      Running             nginx                     0                   cc913652d669a       nginx
	1e595060faf3e       6cb7dcc2008fa       4 minutes ago       Running             headlamp                  0                   2bdbce3d0506c       headlamp-7867546754-xmxdd
	176e35400e6b4       6ef582f3ec844       4 minutes ago       Running             gcp-auth                  0                   ef5df83821e38       gcp-auth-5db96cd9b4-s44p2
	671aa72a09a1e       8b46b1cd48760       4 minutes ago       Running             admission                 0                   85a1b65a34b3a       volcano-admission-5f7844f7bc-kv4hh
	bcb962151c498       1505f556b3a7b       4 minutes ago       Running             volcano-controllers       0                   969b0063a7566       volcano-controllers-59cb4746db-b7bd8
	591fbae21d6c8       d1ca868ab82aa       5 minutes ago       Running             gadget                    2                   3d0e4ad5c6381       gadget-9pgwd
	e0f993f36de9c       d9c7ad4c226bf       5 minutes ago       Running             volcano-scheduler         0                   60ad9646728c0       volcano-scheduler-844f6db89b-bwtk7
	b600d65f7bbdc       77bdba588b953       5 minutes ago       Running             yakd                      0                   c7305ce05f616       yakd-dashboard-799879c74f-gdj2b
	57beab3f7e83e       2437cf7621777       5 minutes ago       Running             coredns                   0                   c1dabb0588375       coredns-7db6d8ff4d-5x2l7
	ae2716d367698       d1ca868ab82aa       5 minutes ago       Exited              gadget                    1                   3d0e4ad5c6381       gadget-9pgwd
	33899ba7d5911       ba04bb24b9575       5 minutes ago       Running             storage-provisioner       0                   b9a422421a145       storage-provisioner
	67cab13d85edc       89d73d416b992       5 minutes ago       Running             kindnet-cni               0                   d962f3914dbda       kindnet-7qr8x
	8a696cf44b3b1       66dbb96a9149f       5 minutes ago       Running             kube-proxy                0                   e274a6aef2e08       kube-proxy-62r6j
	d26768fcef3f9       e1dcc3400d3ea       6 minutes ago       Running             kube-controller-manager   0                   fad3a49558544       kube-controller-manager-addons-155517
	566fe4aca8adb       c7dd04b1bafeb       6 minutes ago       Running             kube-scheduler            0                   55af540582842       kube-scheduler-addons-155517
	d5678b588829b       84c601f3f72c8       6 minutes ago       Running             kube-apiserver            0                   6b54dc456af60       kube-apiserver-addons-155517
	f517c28ada419       014faa467e297       6 minutes ago       Running             etcd                      0                   edf3cfc1bc506       etcd-addons-155517
	
	
	==> containerd <==
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.016373129Z" level=warning msg="cleaning up after shim disconnected" id=ed30151904663a14e41f5c70c59a3eab039616a701fce65cf6953dbf6aaa6615 namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.016385215Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.018141255Z" level=info msg="TearDown network for sandbox \"9bbb6e04b6a069d9041e41b62bab9bdf46bf09d78709ebc6cd0faccfd5a5ad9b\" successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.024402757Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"9bbb6e04b6a069d9041e41b62bab9bdf46bf09d78709ebc6cd0faccfd5a5ad9b\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.024696617Z" level=info msg="RemovePodSandbox \"9bbb6e04b6a069d9041e41b62bab9bdf46bf09d78709ebc6cd0faccfd5a5ad9b\" returns successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.243898441Z" level=info msg="Kill container \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\""
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.300556539Z" level=info msg="shim disconnected" id=368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.300625428Z" level=warning msg="cleaning up after shim disconnected" id=368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.300636931Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.320307119Z" level=info msg="StopContainer for \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\" returns successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.320916583Z" level=info msg="StopPodSandbox for \"68ecc323fb44ac11fac998c858f5b5187fc92fb25ec9b83a8b7001932863af65\""
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.321004253Z" level=info msg="Container to stop \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.358662843Z" level=info msg="shim disconnected" id=68ecc323fb44ac11fac998c858f5b5187fc92fb25ec9b83a8b7001932863af65 namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.358739018Z" level=warning msg="cleaning up after shim disconnected" id=68ecc323fb44ac11fac998c858f5b5187fc92fb25ec9b83a8b7001932863af65 namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.358749767Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.428462007Z" level=info msg="TearDown network for sandbox \"68ecc323fb44ac11fac998c858f5b5187fc92fb25ec9b83a8b7001932863af65\" successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.428510670Z" level=info msg="StopPodSandbox for \"68ecc323fb44ac11fac998c858f5b5187fc92fb25ec9b83a8b7001932863af65\" returns successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.489836926Z" level=info msg="RemoveContainer for \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\""
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.510026208Z" level=info msg="RemoveContainer for \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\" returns successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.510589716Z" level=error msg="ContainerStatus for \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\": not found"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.512385156Z" level=info msg="RemoveContainer for \"f8811fe2aec422e26c9c545046a61a18ff1a8af1bd2a1edf62e1ebb4ed0b9605\""
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.518133988Z" level=info msg="RemoveContainer for \"f8811fe2aec422e26c9c545046a61a18ff1a8af1bd2a1edf62e1ebb4ed0b9605\" returns successfully"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.953577806Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"1e3b18d2f876c1bb4417aae722b878386b6073276d84972e04485183453e9f71\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.965818075Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"c76308010d4a2bc3392d3863a019bd68dabf02a0d5493bc5ffc088a3c75a6ae5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:15:01 addons-155517 containerd[812]: time="2024-07-04T01:15:01.981189936Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"1efc7a263f7b0c7667fed461ab5d804c2c02949a5afc8becf4a51f6f87cb71d9\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	
	
	==> coredns [57beab3f7e83e507ce7e6fb884cd0d41c8e35bd5a2316cbae1f3e08b24e70f6c] <==
	[INFO] 10.244.0.21:59559 - 20477 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000068315s
	[INFO] 10.244.0.21:59559 - 60172 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001456116s
	[INFO] 10.244.0.21:36722 - 27412 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.0042309s
	[INFO] 10.244.0.21:36722 - 60285 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001097978s
	[INFO] 10.244.0.21:59559 - 61805 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.002556449s
	[INFO] 10.244.0.21:59559 - 16147 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000141486s
	[INFO] 10.244.0.21:36722 - 22461 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000064999s
	[INFO] 10.244.0.21:57285 - 59707 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000118414s
	[INFO] 10.244.0.21:45913 - 13762 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000060487s
	[INFO] 10.244.0.21:57285 - 15512 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000055572s
	[INFO] 10.244.0.21:45913 - 40115 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000077143s
	[INFO] 10.244.0.21:57285 - 45360 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000043921s
	[INFO] 10.244.0.21:45913 - 41249 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000317515s
	[INFO] 10.244.0.21:57285 - 62178 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000053209s
	[INFO] 10.244.0.21:45913 - 14460 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000093643s
	[INFO] 10.244.0.21:45913 - 54981 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000054686s
	[INFO] 10.244.0.21:57285 - 1562 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.00009887s
	[INFO] 10.244.0.21:57285 - 58947 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.00007085s
	[INFO] 10.244.0.21:45913 - 40651 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000167766s
	[INFO] 10.244.0.21:57285 - 37296 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001666836s
	[INFO] 10.244.0.21:45913 - 32504 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001799518s
	[INFO] 10.244.0.21:57285 - 2249 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.004449422s
	[INFO] 10.244.0.21:45913 - 59012 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.004617779s
	[INFO] 10.244.0.21:57285 - 59096 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000076642s
	[INFO] 10.244.0.21:45913 - 58622 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.00010856s
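	
	The NXDOMAIN/NOERROR pairs above are ordinary search-path expansion: with the default pod resolv.conf (ndots:5), each lookup is first tried with every search suffix (cluster.local, us-east-2.compute.internal, ...) before the name is resolved as given. A sketch for reproducing this from a throwaway pod; the busybox image tag is an assumption, the service name comes from the log:
	
	# A trailing dot marks the name fully qualified, skipping search expansion.
	kubectl --context addons-155517 run dns-check --rm -it --restart=Never \
	  --image=busybox:1.36 -- \
	  nslookup hello-world-app.default.svc.cluster.local.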
	
	
	==> describe nodes <==
	Name:               addons-155517
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-155517
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8
	                    minikube.k8s.io/name=addons-155517
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700
	                    minikube.k8s.io/version=v1.33.1
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-155517
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Thu, 04 Jul 2024 01:08:57 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-155517
	  AcquireTime:     <unset>
	  RenewTime:       Thu, 04 Jul 2024 01:14:56 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:09:10 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-155517
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	System Info:
	  Machine ID:                 9fd27c19cbdd40e797fc2e621404e195
	  System UUID:                ff208bb9-dbd1-4ea7-9a2d-001b90e3d2d4
	  Boot ID:                    8f650b57-d36f-4952-bd7f-5577bab5f375
	  Kernel Version:             5.15.0-1064-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.18
	  Kubelet Version:            v1.30.2
	  Kube-Proxy Version:         v1.30.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (18 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     hello-world-app-86c47465fc-kcpm6         0 (0%)        0 (0%)      0 (0%)           0 (0%)         26s
	  default                     nginx                                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         35s
	  gadget                      gadget-9pgwd                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m45s
	  gcp-auth                    gcp-auth-5db96cd9b4-s44p2                0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m16s
	  headlamp                    headlamp-7867546754-xmxdd                0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m12s
	  kube-system                 coredns-7db6d8ff4d-5x2l7                 100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     5m52s
	  kube-system                 etcd-addons-155517                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         6m6s
	  kube-system                 kindnet-7qr8x                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m52s
	  kube-system                 kube-apiserver-addons-155517             250m (12%)    0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 kube-controller-manager-addons-155517    200m (10%)    0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 kube-proxy-62r6j                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m52s
	  kube-system                 kube-scheduler-addons-155517             100m (5%)     0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 storage-provisioner                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m47s
	  my-volcano                  test-job-nginx-0                         1 (50%)       1 (50%)     0 (0%)           0 (0%)         3m46s
	  volcano-system              volcano-admission-5f7844f7bc-kv4hh       0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m43s
	  volcano-system              volcano-controllers-59cb4746db-b7bd8     0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m42s
	  volcano-system              volcano-scheduler-844f6db89b-bwtk7       0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m42s
	  yakd-dashboard              yakd-dashboard-799879c74f-gdj2b          0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     5m45s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1850m (92%)  1100m (55%)
	  memory             348Mi (4%)   476Mi (6%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age    From             Message
	  ----    ------                   ----   ----             -------
	  Normal  Starting                 5m50s  kube-proxy       
	  Normal  Starting                 6m7s   kubelet          Starting kubelet.
	  Normal  NodeHasSufficientMemory  6m7s   kubelet          Node addons-155517 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    6m7s   kubelet          Node addons-155517 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     6m7s   kubelet          Node addons-155517 status is now: NodeHasSufficientPID
	  Normal  NodeNotReady             6m7s   kubelet          Node addons-155517 status is now: NodeNotReady
	  Normal  NodeAllocatableEnforced  6m6s   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                5m56s  kubelet          Node addons-155517 status is now: NodeReady
	  Normal  RegisteredNode           5m53s  node-controller  Node addons-155517 event: Registered Node addons-155517 in Controller
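	
	Note the allocation summary above: CPU requests already sit at 1850m of the node's 2 CPUs (92%), leaving little headroom for new pods. The view can be regenerated at any time; commands below assume the same kubectl context as the rest of this run:
	
	# Re-generate the node description, including the allocation table.
	kubectl --context addons-155517 describe node addons-155517
	# Live usage rather than requests; needs the metrics-server addon.
	kubectl --context addons-155517 top node addons-155517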
	
	
	==> dmesg <==
	[  +0.001017] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000030 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000893] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.001005] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +0.002621] FS-Cache: Duplicate cookie detected
	[  +0.000687] FS-Cache: O-cookie c=0000002a [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=000000001e9df579
	[  +0.001010] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000031 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000899] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000dd6a8763
	[  +0.001363] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +2.399414] FS-Cache: Duplicate cookie detected
	[  +0.000668] FS-Cache: O-cookie c=00000028 [p=00000027 fl=226 nc=0 na=1]
	[  +0.000927] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=00000000807950c0
	[  +0.001002] FS-Cache: O-key=[8] '1571ed0000000000'
	[  +0.000681] FS-Cache: N-cookie c=00000033 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000944] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.000997] FS-Cache: N-key=[8] '1571ed0000000000'
	[  +0.414782] FS-Cache: Duplicate cookie detected
	[  +0.000671] FS-Cache: O-cookie c=0000002d [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=0000000007b93ca1
	[  +0.000982] FS-Cache: O-key=[8] '1b71ed0000000000'
	[  +0.000661] FS-Cache: N-cookie c=00000034 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000895] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000f6098f89
	[  +0.000977] FS-Cache: N-key=[8] '1b71ed0000000000'
	
	
	==> etcd [f517c28ada41915907ef5e67d80504dca45673592d66bc4f85c35c8241aa8787] <==
	{"level":"info","ts":"2024-07-04T01:08:53.343057Z","caller":"embed/etcd.go:569","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-07-04T01:08:53.344289Z","caller":"etcdserver/server.go:744","msg":"started as single-node; fast-forwarding election ticks","local-member-id":"aec36adc501070cc","forward-ticks":9,"forward-duration":"900ms","election-ticks":10,"election-timeout":"1s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344386Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap.db","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344412Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344432Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/wal","suffix":"wal","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.34541Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-07-04T01:08:53.345493Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-07-04T01:08:53.831529Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831678Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831724Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831768Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831803Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831834Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831875Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.833983Z","caller":"etcdserver/server.go:2578","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.834852Z","caller":"etcdserver/server.go:2068","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-155517 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-07-04T01:08:53.835607Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-07-04T01:08:53.835809Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836016Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836115Z","caller":"etcdserver/server.go:2602","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836033Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-07-04T01:08:53.84114Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.856473Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.859549Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-07-04T01:08:53.860789Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> gcp-auth [176e35400e6b4d15467a19b19558445b7d1b5dbec42ad8098a036e029f6b3077] <==
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:11:04 Ready to marshal response ...
	2024/07/04 01:11:04 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:28 Ready to marshal response ...
	2024/07/04 01:11:28 Ready to write response ...
	2024/07/04 01:12:21 Ready to marshal response ...
	2024/07/04 01:12:21 Ready to write response ...
	2024/07/04 01:12:36 Ready to marshal response ...
	2024/07/04 01:12:36 Ready to write response ...
	2024/07/04 01:14:31 Ready to marshal response ...
	2024/07/04 01:14:31 Ready to write response ...
	2024/07/04 01:14:40 Ready to marshal response ...
	2024/07/04 01:14:40 Ready to write response ...
	
	
	==> kernel <==
	 01:15:06 up  6:57,  0 users,  load average: 0.48, 2.01, 2.94
	Linux addons-155517 5.15.0-1064-aws #70~20.04.1-Ubuntu SMP Thu Jun 27 14:52:48 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [67cab13d85edc9d5c0f37e6bf189f122a928a79d09306ee9a7e93dbc16acca46] <==
	I0704 01:13:06.177951       1 main.go:227] handling current node
	I0704 01:13:16.183077       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:16.183119       1 main.go:227] handling current node
	I0704 01:13:26.195234       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:26.195260       1 main.go:227] handling current node
	I0704 01:13:36.207113       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:36.207144       1 main.go:227] handling current node
	I0704 01:13:46.211555       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:46.211583       1 main.go:227] handling current node
	I0704 01:13:56.222734       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:56.222773       1 main.go:227] handling current node
	I0704 01:14:06.226686       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:06.226718       1 main.go:227] handling current node
	I0704 01:14:16.230045       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:16.230074       1 main.go:227] handling current node
	I0704 01:14:26.239371       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:26.239399       1 main.go:227] handling current node
	I0704 01:14:36.251388       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:36.251418       1 main.go:227] handling current node
	I0704 01:14:46.255390       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:46.255448       1 main.go:227] handling current node
	I0704 01:14:56.267132       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:56.267349       1 main.go:227] handling current node
	I0704 01:15:06.279702       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:15:06.279731       1 main.go:227] handling current node
	
	
	==> kube-apiserver [d5678b588829bae555d8757bb9d8a3f9d182137807bb650acee005f9f590f5d2] <==
	I0704 01:11:20.626614       1 controller.go:615] quota admission added evaluator for: jobs.batch.volcano.sh
	I0704 01:11:20.663546       1 controller.go:615] quota admission added evaluator for: podgroups.scheduling.volcano.sh
	E0704 01:11:44.500768       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"local-path-provisioner-service-account\" not found]"
	I0704 01:12:33.267448       1 controller.go:615] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
	E0704 01:12:45.514392       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"csi-hostpathplugin-sa\" not found]"
	I0704 01:12:52.070076       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.070121       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.111653       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.111699       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.123212       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.123255       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.144000       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.144039       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.193146       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.193191       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	W0704 01:12:53.114139       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
	W0704 01:12:53.193357       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
	W0704 01:12:53.202807       1 cacher.go:168] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
	I0704 01:12:58.929384       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0704 01:12:59.966476       1 cacher.go:168] Terminating all watchers from cacher traces.gadget.kinvolk.io
	I0704 01:14:31.058045       1 controller.go:615] quota admission added evaluator for: ingresses.networking.k8s.io
	I0704 01:14:31.339872       1 alloc.go:330] "allocated clusterIPs" service="default/nginx" clusterIPs={"IPv4":"10.109.38.209"}
	I0704 01:14:40.977207       1 alloc.go:330] "allocated clusterIPs" service="default/hello-world-app" clusterIPs={"IPv4":"10.109.173.179"}
	E0704 01:14:58.284557       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"ingress-nginx\" not found]"
	E0704 01:14:58.373464       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"ingress-nginx\" not found]"
	
	
	==> kube-controller-manager [d26768fcef3f904fbe4d8309b2336e0d0536a0636f241b26984323c589bd890e] <==
	E0704 01:14:06.338750       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:14:16.691107       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:16.691151       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:14:17.365437       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:17.365477       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:18.603582       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	W0704 01:14:20.463017       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:20.463055       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	I0704 01:14:30.510538       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-c59844bb4" duration="5.423µs"
	I0704 01:14:40.858284       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="82.190511ms"
	I0704 01:14:40.875390       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="17.051242ms"
	I0704 01:14:40.876471       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="44.085µs"
	I0704 01:14:40.887639       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="60.913µs"
	I0704 01:14:43.447804       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="37.833µs"
	I0704 01:14:44.456365       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="39.638µs"
	I0704 01:14:45.456638       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="40.582µs"
	W0704 01:14:45.646663       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:45.646704       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:14:58.175897       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:58.176032       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	I0704 01:14:58.199143       1 job_controller.go:566] "enqueueing job" logger="job-controller" key="ingress-nginx/ingress-nginx-admission-create"
	I0704 01:14:58.206089       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-768f948f8f" duration="7.286µs"
	I0704 01:14:58.214521       1 job_controller.go:566] "enqueueing job" logger="job-controller" key="ingress-nginx/ingress-nginx-admission-patch"
	E0704 01:14:59.700636       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	I0704 01:15:01.506970       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="43.068µs"
	
	
	==> kube-proxy [8a696cf44b3b1b9718c7bc5215c5dd91d048a8496501845e4b243c3a46ba4f90] <==
	I0704 01:09:15.745880       1 server_linux.go:69] "Using iptables proxy"
	I0704 01:09:15.772902       1 server.go:1062] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	I0704 01:09:15.799195       1 server.go:659] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0704 01:09:15.799240       1 server_linux.go:165] "Using iptables Proxier"
	I0704 01:09:15.804393       1 server_linux.go:511] "Detect-local-mode set to ClusterCIDR, but no cluster CIDR for family" ipFamily="IPv6"
	I0704 01:09:15.804419       1 server_linux.go:528] "Defaulting to no-op detect-local"
	I0704 01:09:15.804446       1 proxier.go:243] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses"
	I0704 01:09:15.804673       1 server.go:872] "Version info" version="v1.30.2"
	I0704 01:09:15.804688       1 server.go:874] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0704 01:09:15.805805       1 config.go:192] "Starting service config controller"
	I0704 01:09:15.805822       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0704 01:09:15.805846       1 config.go:101] "Starting endpoint slice config controller"
	I0704 01:09:15.805850       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0704 01:09:15.808392       1 config.go:319] "Starting node config controller"
	I0704 01:09:15.808406       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0704 01:09:15.906774       1 shared_informer.go:320] Caches are synced for service config
	I0704 01:09:15.906727       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0704 01:09:15.908585       1 shared_informer.go:320] Caches are synced for node config
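	
	The route_localnet line above is what allows the Ingress test to reach a NodePort via 127.0.0.1 on the node itself. A quick way to confirm the sysctl kube-proxy claims to have set, using this run's profile name:
	
	# Inspect the sysctl inside the minikube node.
	minikube -p addons-155517 ssh -- sudo sysctl net.ipv4.conf.all.route_localnet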
	
	
	==> kube-scheduler [566fe4aca8adb536ad06b4727d9447ef68bd730ff7b9e8ddd94c6dfc6a8de11a] <==
	W0704 01:08:57.400228       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0704 01:08:57.400246       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	W0704 01:08:57.400636       1 reflector.go:547] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0704 01:08:57.400664       1 reflector.go:150] runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	W0704 01:08:57.400873       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.400898       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:57.401054       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0704 01:08:57.401076       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	W0704 01:08:57.401091       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0704 01:08:57.401108       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	W0704 01:08:57.401143       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0704 01:08:57.401163       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	W0704 01:08:57.401174       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.401182       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.239191       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0704 01:08:58.239466       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	W0704 01:08:58.379662       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0704 01:08:58.379764       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	W0704 01:08:58.415451       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0704 01:08:58.415596       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	W0704 01:08:58.433246       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:58.433288       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.520896       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0704 01:08:58.520940       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	I0704 01:08:58.883633       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Jul 04 01:14:59 addons-155517 kubelet[1544]: I0704 01:14:59.866820    1544 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a59d670-ac1a-4eb9-aa7a-59f516352eff" path="/var/lib/kubelet/pods/2a59d670-ac1a-4eb9-aa7a-59f516352eff/volumes"
	Jul 04 01:14:59 addons-155517 kubelet[1544]: I0704 01:14:59.867905    1544 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43b6a680-b49d-43a4-aab1-37f773331f42" path="/var/lib/kubelet/pods/43b6a680-b49d-43a4-aab1-37f773331f42/volumes"
	Jul 04 01:15:00 addons-155517 kubelet[1544]: I0704 01:15:00.834491    1544 scope.go:117] "RemoveContainer" containerID="2ba7cf4a30a861d1b8f4b0f19f8a91865eb68f9c472d434a44413ba88c402a13"
	Jul 04 01:15:00 addons-155517 kubelet[1544]: I0704 01:15:00.852090    1544 scope.go:117] "RemoveContainer" containerID="3ea1321729282dfc5ddb130ab314e0fccf692a5ede76b43aaebd7828f318f7c3"
	Jul 04 01:15:00 addons-155517 kubelet[1544]: I0704 01:15:00.862388    1544 scope.go:117] "RemoveContainer" containerID="f8811fe2aec422e26c9c545046a61a18ff1a8af1bd2a1edf62e1ebb4ed0b9605"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.488141    1544 scope.go:117] "RemoveContainer" containerID="368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.492491    1544 scope.go:117] "RemoveContainer" containerID="ed30151904663a14e41f5c70c59a3eab039616a701fce65cf6953dbf6aaa6615"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: E0704 01:15:01.493078    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with CrashLoopBackOff: \"back-off 20s restarting failed container=hello-world-app pod=hello-world-app-86c47465fc-kcpm6_default(c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f)\"" pod="default/hello-world-app-86c47465fc-kcpm6" podUID="c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.510285    1544 scope.go:117] "RemoveContainer" containerID="368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: E0704 01:15:01.510874    1544 remote_runtime.go:432] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\": not found" containerID="368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.510914    1544 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f"} err="failed to get container status \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\": rpc error: code = NotFound desc = an error occurred when try to find container \"368ff31908f3150928ac09c913c85d02979f63f5d28ed967be4e65fbeed5917f\": not found"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.510941    1544 scope.go:117] "RemoveContainer" containerID="f8811fe2aec422e26c9c545046a61a18ff1a8af1bd2a1edf62e1ebb4ed0b9605"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.528219    1544 reconciler_common.go:161] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-webhook-cert\") pod \"bcfbe9c4-d293-405f-9f84-a6c0799c55ad\" (UID: \"bcfbe9c4-d293-405f-9f84-a6c0799c55ad\") "
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.528283    1544 reconciler_common.go:161] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4svgz\" (UniqueName: \"kubernetes.io/projected/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-kube-api-access-4svgz\") pod \"bcfbe9c4-d293-405f-9f84-a6c0799c55ad\" (UID: \"bcfbe9c4-d293-405f-9f84-a6c0799c55ad\") "
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.531934    1544 operation_generator.go:887] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "bcfbe9c4-d293-405f-9f84-a6c0799c55ad" (UID: "bcfbe9c4-d293-405f-9f84-a6c0799c55ad"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.531934    1544 operation_generator.go:887] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-kube-api-access-4svgz" (OuterVolumeSpecName: "kube-api-access-4svgz") pod "bcfbe9c4-d293-405f-9f84-a6c0799c55ad" (UID: "bcfbe9c4-d293-405f-9f84-a6c0799c55ad"). InnerVolumeSpecName "kube-api-access-4svgz". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.629640    1544 reconciler_common.go:289] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-webhook-cert\") on node \"addons-155517\" DevicePath \"\""
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.629690    1544 reconciler_common.go:289] "Volume detached for volume \"kube-api-access-4svgz\" (UniqueName: \"kubernetes.io/projected/bcfbe9c4-d293-405f-9f84-a6c0799c55ad-kube-api-access-4svgz\") on node \"addons-155517\" DevicePath \"\""
	Jul 04 01:15:01 addons-155517 kubelet[1544]: I0704 01:15:01.866557    1544 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcfbe9c4-d293-405f-9f84-a6c0799c55ad" path="/var/lib/kubelet/pods/bcfbe9c4-d293-405f-9f84-a6c0799c55ad/volumes"
	Jul 04 01:15:01 addons-155517 kubelet[1544]: E0704 01:15:01.954244    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"1e3b18d2f876c1bb4417aae722b878386b6073276d84972e04485183453e9f71\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:15:01 addons-155517 kubelet[1544]: E0704 01:15:01.966115    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"c76308010d4a2bc3392d3863a019bd68dabf02a0d5493bc5ffc088a3c75a6ae5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:15:01 addons-155517 kubelet[1544]: E0704 01:15:01.981501    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"1efc7a263f7b0c7667fed461ab5d804c2c02949a5afc8becf4a51f6f87cb71d9\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:15:06 addons-155517 kubelet[1544]: E0704 01:15:06.970120    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"f0f0c2a322776354a70d0bb4cf691f759cb455194f456658db80160958aeb029\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:15:06 addons-155517 kubelet[1544]: E0704 01:15:06.988770    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"903fddeb70b0cdf7e4fe6fcac7bdba3495d0192edba39cf199fb1d4c8ce6c8fe\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:15:07 addons-155517 kubelet[1544]: E0704 01:15:07.004810    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"bbe64f99b2b2cc7df2f6d6c41e82e2e133795a348b6d4537b1296420ccc63753\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	
	
	==> storage-provisioner [33899ba7d59110e40a8409b27a7737ac3b4858d348229b496df42db6a119852b] <==
	I0704 01:09:20.859954       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0704 01:09:20.902425       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0704 01:09:20.902503       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0704 01:09:20.962139       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0704 01:09:20.962380       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	I0704 01:09:20.963772       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"58c57b6c-af21-4158-b30e-a900c384acaa", APIVersion:"v1", ResourceVersion:"568", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db became leader
	I0704 01:09:21.062827       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	

                                                
                                                
-- /stdout --
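The storage-provisioner log at the end of the dump above shows a normal leader-election sequence against the kube-system/k8s.io-minikube-hostpath endpoints object. Were the cluster from this run still reachable, the election record could be inspected directly; a minimal sketch, assuming the addons-155517 kubeconfig context still exists:

	kubectl --context addons-155517 -n kube-system get endpoints k8s.io-minikube-hostpath -o yaml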
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-155517 -n addons-155517
helpers_test.go:261: (dbg) Run:  kubectl --context addons-155517 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestAddons/parallel/Ingress FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestAddons/parallel/Ingress (36.99s)

                                                
                                    
x
+
TestAddons/parallel/InspektorGadget (2122.19s)

                                                
                                                
=== RUN   TestAddons/parallel/InspektorGadget
=== PAUSE TestAddons/parallel/InspektorGadget

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/InspektorGadget
addons_test.go:840: (dbg) TestAddons/parallel/InspektorGadget: waiting 8m0s for pods matching "k8s-app=gadget" in namespace "gadget" ...
helpers_test.go:344: "gadget-9pgwd" [f1aa7304-3cea-42ac-b4db-60ed56cb1615] Running / Ready:ContainersNotReady (containers with unready status: [gadget]) / ContainersReady:ContainersNotReady (containers with unready status: [gadget])
addons_test.go:840: (dbg) TestAddons/parallel/InspektorGadget: k8s-app=gadget healthy within 6.004514652s
addons_test.go:843: (dbg) Run:  out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-155517
addons_test.go:843: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-155517: signal: killed (35m13.681838556s)
addons_test.go:844: failed to disable inspektor-gadget addon: args "out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-155517" : signal: killed
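The disable call itself is not test-specific, so a hang like this can be chased outside the harness by rerunning it with verbose logging and a bounded wait. A sketch, not part of the test run: the profile name addons-155517 comes from this report, and coreutils timeout is only there to cap the wait instead of the 35-minute stall seen above:

	timeout 120 out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-155517 --alsologtostderr -v=1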
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/InspektorGadget]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-155517
helpers_test.go:235: (dbg) docker inspect addons-155517:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908",
	        "Created": "2024-07-04T01:08:36.303265492Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1196940,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-07-04T01:08:36.435418596Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:fe62b5a5301065dd92924d274286e0d1b2227c557eb51c213d07169631b2b3f7",
	        "ResolvConfPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hostname",
	        "HostsPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hosts",
	        "LogPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908-json.log",
	        "Name": "/addons-155517",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-155517:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-155517",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908-init/diff:/var/lib/docker/overlay2/04be1cfb4b9b173c47d5bff32a15bd2c62951348a7d8ba248dee1fc574bba292/diff",
	                "MergedDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/merged",
	                "UpperDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/diff",
	                "WorkDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-155517",
	                "Source": "/var/lib/docker/volumes/addons-155517/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-155517",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-155517",
	                "name.minikube.sigs.k8s.io": "addons-155517",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "00149764ac28081fa271644bd8abd017b3859409342eb0cd27d7072d3bc248ad",
	            "SandboxKey": "/var/run/docker/netns/00149764ac28",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33941"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33942"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33945"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33943"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33944"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-155517": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "664a751e340faab9d5bafc3b0d537e0c69162e18e1e2ac12a6117fe790e76074",
	                    "EndpointID": "8e02106e4b600acb13911d96d2636d06ef6a5e9c2d13c8b3302477a5d63292bc",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-155517",
	                        "3fdcd90a73a7"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
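The NetworkSettings.Ports map in the inspect output above records the host ports minikube published for the container (SSH on 33941, the API server on 33944, and so on). A single field can be pulled out with docker inspect's standard Go templating rather than scanning the full JSON; a sketch, with the container name taken from this run:

	docker inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' addons-155517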
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-155517 -n addons-155517
helpers_test.go:244: <<< TestAddons/parallel/InspektorGadget FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/InspektorGadget]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 logs -n 25: (1.474702195s)
helpers_test.go:252: TestAddons/parallel/InspektorGadget logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                                            Args                                             |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-327632                                                                     | download-only-327632   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | download-docker-898650                                                                      |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p download-docker-898650                                                                   | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | binary-mirror-236711                                                                        |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --binary-mirror                                                                             |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35737                                                                      |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-236711                                                                     | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| addons  | enable dashboard -p                                                                         | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| start   | -p addons-155517 --wait=true                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:10 UTC |
	|         | --memory=4000 --alsologtostderr                                                             |                        |         |         |                     |                     |
	|         | --addons=registry                                                                           |                        |         |         |                     |                     |
	|         | --addons=metrics-server                                                                     |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots                                                                    |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver                                                                |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                                                                           |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner                                                                      |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget                                                                   |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher                                                        |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin                                                               |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano                                                              |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	|         | --addons=ingress                                                                            |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                                                                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                                                                             | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:10 UTC | 04 Jul 24 01:10 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| ip      | addons-155517 ip                                                                            | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | registry --alsologtostderr                                                                  |                        |         |         |                     |                     |
	|         | -v=1                                                                                        |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	| ssh     | addons-155517 ssh cat                                                                       | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | /opt/local-path-provisioner/pvc-c05b7d52-4c97-4ba9-8a04-478d46aaf85d_default_test-pvc/file1 |                        |         |         |                     |                     |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:12 UTC |
	|         | storage-provisioner-rancher                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p                                                                    | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable csi-hostpath-driver                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable volumesnapshots                                                                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p                                                                 | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | disable metrics-server                                                                      |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| ssh     | addons-155517 ssh curl -s                                                                   | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | http://127.0.0.1/ -H 'Host:                                                                 |                        |         |         |                     |                     |
	|         | nginx.example.com'                                                                          |                        |         |         |                     |                     |
	| ip      | addons-155517 ip                                                                            | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:14 UTC |
	|         | ingress-dns --alsologtostderr                                                               |                        |         |         |                     |                     |
	|         | -v=1                                                                                        |                        |         |         |                     |                     |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:14 UTC | 04 Jul 24 01:15 UTC |
	|         | ingress --alsologtostderr -v=1                                                              |                        |         |         |                     |                     |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 01:08:12
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 01:08:12.192271 1196445 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:08:12.192443 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192469 1196445 out.go:304] Setting ErrFile to fd 2...
	I0704 01:08:12.192488 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192753 1196445 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:08:12.193235 1196445 out.go:298] Setting JSON to false
	I0704 01:08:12.194161 1196445 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":24643,"bootTime":1720030650,"procs":172,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:08:12.194241 1196445 start.go:139] virtualization:  
	I0704 01:08:12.196416 1196445 out.go:177] * [addons-155517] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 01:08:12.199094 1196445 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 01:08:12.199240 1196445 notify.go:220] Checking for updates...
	I0704 01:08:12.202974 1196445 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:08:12.205218 1196445 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:08:12.207279 1196445 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:08:12.209429 1196445 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 01:08:12.211932 1196445 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 01:08:12.213938 1196445 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:08:12.234468 1196445 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:08:12.234611 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.299096 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.289472659 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.299211 1196445 docker.go:295] overlay module found
	I0704 01:08:12.301600 1196445 out.go:177] * Using the docker driver based on user configuration
	I0704 01:08:12.303741 1196445 start.go:297] selected driver: docker
	I0704 01:08:12.303757 1196445 start.go:901] validating driver "docker" against <nil>
	I0704 01:08:12.303770 1196445 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 01:08:12.304380 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.362176 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.352995693 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.362343 1196445 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0704 01:08:12.362580 1196445 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 01:08:12.364680 1196445 out.go:177] * Using Docker driver with root privileges
	I0704 01:08:12.366418 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:12.366456 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:12.366471 1196445 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0704 01:08:12.366574 1196445 start.go:340] cluster config:
	{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:12.368869 1196445 out.go:177] * Starting "addons-155517" primary control-plane node in "addons-155517" cluster
	I0704 01:08:12.371240 1196445 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 01:08:12.373736 1196445 out.go:177] * Pulling base image v0.0.44-1719972989-19184 ...
	I0704 01:08:12.376681 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:12.376741 1196445 preload.go:147] Found local preload: /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	I0704 01:08:12.376753 1196445 cache.go:56] Caching tarball of preloaded images
	I0704 01:08:12.376764 1196445 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 01:08:12.376836 1196445 preload.go:173] Found /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0704 01:08:12.376846 1196445 cache.go:59] Finished verifying existence of preloaded tar for v1.30.2 on containerd
	I0704 01:08:12.377194 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:12.377218 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json: {Name:mk1983fdaacaaa697964d44e7205145a658b8fe2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:12.392599 1196445 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:08:12.392727 1196445 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory
	I0704 01:08:12.392756 1196445 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory, skipping pull
	I0704 01:08:12.392765 1196445 image.go:105] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in cache, skipping pull
	I0704 01:08:12.392773 1196445 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 as a tarball
	I0704 01:08:12.392779 1196445 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from local cache
	I0704 01:08:28.987230 1196445 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from cached tarball
	I0704 01:08:28.987269 1196445 cache.go:194] Successfully downloaded all kic artifacts
	I0704 01:08:28.987306 1196445 start.go:360] acquireMachinesLock for addons-155517: {Name:mk8b1bd096582ae2ddeb51ce97c96e8bd6c10c03 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0704 01:08:28.987436 1196445 start.go:364] duration metric: took 108.035µs to acquireMachinesLock for "addons-155517"
	I0704 01:08:28.987467 1196445 start.go:93] Provisioning new machine with config: &{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:08:28.987567 1196445 start.go:125] createHost starting for "" (driver="docker")
	I0704 01:08:28.990219 1196445 out.go:204] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0704 01:08:28.990467 1196445 start.go:159] libmachine.API.Create for "addons-155517" (driver="docker")
	I0704 01:08:28.990494 1196445 client.go:168] LocalClient.Create starting
	I0704 01:08:28.990609 1196445 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem
	I0704 01:08:29.608241 1196445 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem
	I0704 01:08:29.736436 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0704 01:08:29.752779 1196445 cli_runner.go:211] docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0704 01:08:29.752887 1196445 network_create.go:284] running [docker network inspect addons-155517] to gather additional debugging logs...
	I0704 01:08:29.752909 1196445 cli_runner.go:164] Run: docker network inspect addons-155517
	W0704 01:08:29.768706 1196445 cli_runner.go:211] docker network inspect addons-155517 returned with exit code 1
	I0704 01:08:29.768736 1196445 network_create.go:287] error running [docker network inspect addons-155517]: docker network inspect addons-155517: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-155517 not found
	I0704 01:08:29.768749 1196445 network_create.go:289] output of [docker network inspect addons-155517]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-155517 not found
	
	** /stderr **
	I0704 01:08:29.768847 1196445 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:29.784069 1196445 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001c8c290}
	I0704 01:08:29.784107 1196445 network_create.go:124] attempt to create docker network addons-155517 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0704 01:08:29.784173 1196445 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-155517 addons-155517
	I0704 01:08:29.849610 1196445 network_create.go:108] docker network addons-155517 192.168.49.0/24 created
	I0704 01:08:29.849640 1196445 kic.go:121] calculated static IP "192.168.49.2" for the "addons-155517" container
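Note: the gateway (192.168.49.1) and static container IP (192.168.49.2) above fall out of the chosen subnet arithmetically. A minimal Go sketch of that arithmetic (not minikube's actual network code; the service CIDR 10.96.0.0/12 from the cluster config is included for comparison):

package main

import (
	"fmt"
	"net/netip"
)

// firstHosts returns the first two host addresses of a CIDR: for a kic
// network that is the gateway (.1) and the first client/container IP (.2).
func firstHosts(cidr string) (netip.Addr, netip.Addr) {
	p := netip.MustParsePrefix(cidr)
	gw := p.Addr().Next() // network address + 1
	return gw, gw.Next()  // network address + 2
}

func main() {
	gw, client := firstHosts("192.168.49.0/24")
	fmt.Println(gw, client) // 192.168.49.1 192.168.49.2
	svc, _ := firstHosts("10.96.0.0/12")
	fmt.Println(svc) // 10.96.0.1, the in-cluster apiserver service IP
}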
	I0704 01:08:29.849732 1196445 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0704 01:08:29.864369 1196445 cli_runner.go:164] Run: docker volume create addons-155517 --label name.minikube.sigs.k8s.io=addons-155517 --label created_by.minikube.sigs.k8s.io=true
	I0704 01:08:29.881618 1196445 oci.go:103] Successfully created a docker volume addons-155517
	I0704 01:08:29.881710 1196445 cli_runner.go:164] Run: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib
	I0704 01:08:31.824188 1196445 cli_runner.go:217] Completed: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib: (1.942438119s)
	I0704 01:08:31.824231 1196445 oci.go:107] Successfully prepared a docker volume addons-155517
	I0704 01:08:31.824258 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:31.824278 1196445 kic.go:194] Starting extracting preloaded images to volume ...
	I0704 01:08:31.824367 1196445 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir
	I0704 01:08:36.228874 1196445 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir: (4.404462021s)
	I0704 01:08:36.228908 1196445 kic.go:203] duration metric: took 4.404626194s to extract preloaded images to volume ...
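The extraction step boils down to running tar inside a throwaway container that mounts both the preload tarball and the machine volume. A hedged Go sketch of the same invocation via os/exec (the tarball path below is a placeholder, not the real cache path):

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Mount the preload tarball read-only, mount the machine volume at
	// /extractDir, and untar with lz4 decompression (flags as logged).
	tarball := "/tmp/preloaded-images.tar.lz4" // placeholder path
	cmd := exec.Command("docker", "run", "--rm", "--entrypoint", "/usr/bin/tar",
		"-v", tarball+":/preloaded.tar:ro",
		"-v", "addons-155517:/extractDir",
		"gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184",
		"-I", "lz4", "-xf", "/preloaded.tar", "-C", "/extractDir")
	out, err := cmd.CombinedOutput()
	fmt.Println(string(out), err)
}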
	W0704 01:08:36.229067 1196445 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0704 01:08:36.229179 1196445 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0704 01:08:36.288715 1196445 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-155517 --name addons-155517 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-155517 --network addons-155517 --ip 192.168.49.2 --volume addons-155517:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1
	I0704 01:08:36.596197 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Running}}
	I0704 01:08:36.616187 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:36.634979 1196445 cli_runner.go:164] Run: docker exec addons-155517 stat /var/lib/dpkg/alternatives/iptables
	I0704 01:08:36.703846 1196445 oci.go:144] the created container "addons-155517" has a running status.
	I0704 01:08:36.703874 1196445 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa...
	I0704 01:08:37.190789 1196445 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
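The kic SSH key above is an ordinary RSA keypair. A stdlib-only Go sketch of generating and PEM-encoding such a key (deriving the id_rsa.pub/authorized_keys line would additionally need golang.org/x/crypto/ssh, omitted here):

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"os"
)

func main() {
	// Generate a 2048-bit RSA key and write it in the classic PEM form
	// used for an id_rsa file.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(key),
	})
}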
	I0704 01:08:37.210820 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.232784 1196445 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0704 01:08:37.232803 1196445 kic_runner.go:114] Args: [docker exec --privileged addons-155517 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0704 01:08:37.300309 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.323611 1196445 machine.go:94] provisionDockerMachine start ...
	I0704 01:08:37.323700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.348214 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.348477 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.348487 1196445 main.go:141] libmachine: About to run SSH command:
	hostname
	I0704 01:08:37.530824 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.530887 1196445 ubuntu.go:169] provisioning hostname "addons-155517"
	I0704 01:08:37.530986 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.553591 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.553832 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.553843 1196445 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-155517 && echo "addons-155517" | sudo tee /etc/hostname
	I0704 01:08:37.714152 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.714277 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.740011 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.740256 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.740273 1196445 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-155517' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-155517/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-155517' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0704 01:08:37.884114 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: 
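All of the SSH traffic above goes to 127.0.0.1:33941, the host port Docker mapped to the container's 22/tcp. A small Go sketch that resolves that port using the same inspect template the log itself runs:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Ask Docker which host port was published for the container's sshd.
	const tmpl = `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
	out, err := exec.Command("docker", "container", "inspect", "-f", tmpl, "addons-155517").Output()
	if err != nil {
		panic(err)
	}
	fmt.Println("ssh port:", strings.TrimSpace(string(out))) // 33941 in this run
}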
	I0704 01:08:37.884141 1196445 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18859-1190282/.minikube CaCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18859-1190282/.minikube}
	I0704 01:08:37.884170 1196445 ubuntu.go:177] setting up certificates
	I0704 01:08:37.884179 1196445 provision.go:84] configureAuth start
	I0704 01:08:37.884270 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:37.903878 1196445 provision.go:143] copyHostCerts
	I0704 01:08:37.903989 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem (1078 bytes)
	I0704 01:08:37.904149 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem (1123 bytes)
	I0704 01:08:37.904212 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem (1675 bytes)
	I0704 01:08:37.904262 1196445 provision.go:117] generating server cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem org=jenkins.addons-155517 san=[127.0.0.1 192.168.49.2 addons-155517 localhost minikube]
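For reference, a self-contained Go sketch of issuing a serving certificate with the SAN list logged above; it is self-signed for brevity, whereas minikube signs with its ca.pem:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	tpl := x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.addons-155517"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour), // CertExpiration from the config
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		// SANs matching the logged san=[...] list.
		DNSNames:    []string{"addons-155517", "localhost", "minikube"},
		IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.49.2")},
	}
	der, err := x509.CreateCertificate(rand.Reader, &tpl, &tpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}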
	I0704 01:08:38.603884 1196445 provision.go:177] copyRemoteCerts
	I0704 01:08:38.603955 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0704 01:08:38.604000 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.623978 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.720166 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0704 01:08:38.743354 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0704 01:08:38.766785 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0704 01:08:38.789642 1196445 provision.go:87] duration metric: took 905.449187ms to configureAuth
	I0704 01:08:38.789672 1196445 ubuntu.go:193] setting minikube options for container-runtime
	I0704 01:08:38.789856 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:08:38.789871 1196445 machine.go:97] duration metric: took 1.466242644s to provisionDockerMachine
	I0704 01:08:38.789879 1196445 client.go:171] duration metric: took 9.799379567s to LocalClient.Create
	I0704 01:08:38.789892 1196445 start.go:167] duration metric: took 9.799425482s to libmachine.API.Create "addons-155517"
	I0704 01:08:38.789902 1196445 start.go:293] postStartSetup for "addons-155517" (driver="docker")
	I0704 01:08:38.789912 1196445 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0704 01:08:38.789968 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0704 01:08:38.790010 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.806758 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.904686 1196445 ssh_runner.go:195] Run: cat /etc/os-release
	I0704 01:08:38.908018 1196445 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0704 01:08:38.908055 1196445 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0704 01:08:38.908066 1196445 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0704 01:08:38.908082 1196445 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0704 01:08:38.908093 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/addons for local assets ...
	I0704 01:08:38.908162 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/files for local assets ...
	I0704 01:08:38.908190 1196445 start.go:296] duration metric: took 118.282456ms for postStartSetup
	I0704 01:08:38.908511 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:38.923846 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:38.924136 1196445 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:08:38.924185 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.940968 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.036923 1196445 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0704 01:08:39.042092 1196445 start.go:128] duration metric: took 10.054507303s to createHost
	I0704 01:08:39.042119 1196445 start.go:83] releasing machines lock for "addons-155517", held for 10.054668801s
	I0704 01:08:39.042203 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:39.059237 1196445 ssh_runner.go:195] Run: cat /version.json
	I0704 01:08:39.059305 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.059637 1196445 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0704 01:08:39.059700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.080747 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.097096 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.175033 1196445 ssh_runner.go:195] Run: systemctl --version
	I0704 01:08:39.348341 1196445 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0704 01:08:39.352559 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0704 01:08:39.378748 1196445 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0704 01:08:39.378866 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0704 01:08:39.409039 1196445 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
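The disable step above just renames matching CNI configs out of the way so kindnet's config wins. A hedged Go sketch of the same idea (glob patterns taken from the logged find expression; the loopback patch is not reproduced):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	for _, pat := range []string{"/etc/cni/net.d/*bridge*", "/etc/cni/net.d/*podman*"} {
		matches, _ := filepath.Glob(pat)
		for _, f := range matches {
			// Skip files already sidelined on a previous run.
			if filepath.Ext(f) == ".mk_disabled" {
				continue
			}
			fmt.Println("disabling", f)
			os.Rename(f, f+".mk_disabled")
		}
	}
}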
	I0704 01:08:39.409077 1196445 start.go:495] detecting cgroup driver to use...
	I0704 01:08:39.409110 1196445 detect.go:196] detected "cgroupfs" cgroup driver on host os
	I0704 01:08:39.409164 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0704 01:08:39.422321 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0704 01:08:39.434248 1196445 docker.go:217] disabling cri-docker service (if available) ...
	I0704 01:08:39.434344 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0704 01:08:39.448530 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0704 01:08:39.463578 1196445 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0704 01:08:39.552252 1196445 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0704 01:08:39.641041 1196445 docker.go:233] disabling docker service ...
	I0704 01:08:39.641108 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0704 01:08:39.660807 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0704 01:08:39.673868 1196445 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0704 01:08:39.763520 1196445 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0704 01:08:39.851995 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0704 01:08:39.863472 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0704 01:08:39.880705 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.9"|' /etc/containerd/config.toml"
	I0704 01:08:39.890751 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0704 01:08:39.901368 1196445 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0704 01:08:39.901467 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0704 01:08:39.911715 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.921830 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0704 01:08:39.931688 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.941303 1196445 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0704 01:08:39.950585 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0704 01:08:39.960278 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0704 01:08:39.972286 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0704 01:08:39.982106 1196445 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0704 01:08:39.991352 1196445 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0704 01:08:39.999917 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.099343 1196445 ssh_runner.go:195] Run: sudo systemctl restart containerd
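Each of the sed edits above rewrites one key of /etc/containerd/config.toml in place. A Go sketch of the SystemdCgroup edit as a regexp replacement, forcing it to false to match the "cgroupfs" driver detected on the host:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Abbreviated stand-in for the real config.toml contents.
	cfg := `[plugins."io.containerd.grpc.v1.cri".containerd.runtimes.runc.options]
  SystemdCgroup = true`
	// Equivalent of: sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g'
	re := regexp.MustCompile(`(?m)^(\s*)SystemdCgroup = .*$`)
	fmt.Println(re.ReplaceAllString(cfg, "${1}SystemdCgroup = false"))
}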
	I0704 01:08:40.243998 1196445 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0704 01:08:40.244098 1196445 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0704 01:08:40.247881 1196445 start.go:563] Will wait 60s for crictl version
	I0704 01:08:40.247950 1196445 ssh_runner.go:195] Run: which crictl
	I0704 01:08:40.251327 1196445 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0704 01:08:40.296521 1196445 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.18
	RuntimeApiVersion:  v1
	I0704 01:08:40.296607 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.318696 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.343842 1196445 out.go:177] * Preparing Kubernetes v1.30.2 on containerd 1.7.18 ...
	I0704 01:08:40.345454 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:40.361140 1196445 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0704 01:08:40.364944 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.376129 1196445 kubeadm.go:877] updating cluster {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0704 01:08:40.376264 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:40.376340 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.412122 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.412146 1196445 containerd.go:534] Images already preloaded, skipping extraction
	I0704 01:08:40.412211 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.448869 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.448892 1196445 cache_images.go:84] Images are preloaded, skipping loading
	I0704 01:08:40.448900 1196445 kubeadm.go:928] updating node { 192.168.49.2 8443 v1.30.2 containerd true true} ...
	I0704 01:08:40.449001 1196445 kubeadm.go:940] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.30.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-155517 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
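The kubelet drop-in above is effectively a rendered template whose moving parts are the binary version, node name, and node IP. A minimal Go sketch of that rendering (flags abbreviated; not minikube's actual template):

package main

import (
	"os"
	"text/template"
)

// Abbreviated form of the [Service] drop-in shown in the log.
const unit = `[Unit]
Wants=containerd.service

[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --hostname-override={{.Node}} --node-ip={{.IP}}

[Install]
`

func main() {
	t := template.Must(template.New("kubelet").Parse(unit))
	t.Execute(os.Stdout, map[string]string{
		"Version": "v1.30.2",
		"Node":    "addons-155517",
		"IP":      "192.168.49.2",
	})
}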
	I0704 01:08:40.449071 1196445 ssh_runner.go:195] Run: sudo crictl info
	I0704 01:08:40.489171 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:40.489196 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:40.489206 1196445 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0704 01:08:40.489228 1196445 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.30.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-155517 NodeName:addons-155517 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0704 01:08:40.489399 1196445 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-155517"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.30.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0704 01:08:40.489469 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.30.2
	I0704 01:08:40.498054 1196445 binaries.go:44] Found k8s binaries, skipping transfer
	I0704 01:08:40.498164 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0704 01:08:40.506560 1196445 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0704 01:08:40.524624 1196445 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0704 01:08:40.542714 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0704 01:08:40.560914 1196445 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0704 01:08:40.564512 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.575356 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.663366 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:08:40.678704 1196445 certs.go:68] Setting up /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517 for IP: 192.168.49.2
	I0704 01:08:40.678774 1196445 certs.go:194] generating shared ca certs ...
	I0704 01:08:40.678804 1196445 certs.go:226] acquiring lock for ca certs: {Name:mk4f0dbc18506f7ee4fcbc10f124348dd208ffc0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:40.678969 1196445 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key
	I0704 01:08:41.197423 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt ...
	I0704 01:08:41.197454 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt: {Name:mkb28c983e13ee826bf585de68c8dd48b64194c2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197647 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key ...
	I0704 01:08:41.197660 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key: {Name:mk0f96934eb2f8ea5b78e7bab1383e47ca4c47bf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197743 1196445 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key
	I0704 01:08:41.716569 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt ...
	I0704 01:08:41.716608 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt: {Name:mk49decf76e004afe576981c44baf46e246e42aa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.716807 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key ...
	I0704 01:08:41.716820 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key: {Name:mkfa598677724bedcdda29a6fc68fc0dff6ee016 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.717561 1196445 certs.go:256] generating profile certs ...
	I0704 01:08:41.717627 1196445 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key
	I0704 01:08:41.717645 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt with IP's: []
	I0704 01:08:42.174023 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt ...
	I0704 01:08:42.174061 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: {Name:mkc35913a7eb1db2825718f6dc2b65e7745aa5c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174290 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key ...
	I0704 01:08:42.174304 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key: {Name:mkfadadfe13d73634e029f31163c920115daacae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174396 1196445 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5
	I0704 01:08:42.174417 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0704 01:08:42.578819 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 ...
	I0704 01:08:42.578850 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5: {Name:mk7ecc68d7a90c858109ff8e83b26bc005452b6f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.579635 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 ...
	I0704 01:08:42.579664 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5: {Name:mk9daa218eca8496a54e034db67849e3cbe7a05c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.580299 1196445 certs.go:381] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt
	I0704 01:08:42.580398 1196445 certs.go:385] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key
	I0704 01:08:42.580459 1196445 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key
	I0704 01:08:42.580484 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt with IP's: []
	I0704 01:08:42.984339 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt ...
	I0704 01:08:42.984371 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt: {Name:mk9654e604aa4538cb254b142d3cacdd3534a634 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984566 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key ...
	I0704 01:08:42.984584 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key: {Name:mkcae6cd9331f55e5fdb956fdd3942c8d213675d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984770 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem (1679 bytes)
	I0704 01:08:42.984813 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem (1078 bytes)
	I0704 01:08:42.984842 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem (1123 bytes)
	I0704 01:08:42.984870 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem (1675 bytes)
	I0704 01:08:42.985445 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0704 01:08:43.011174 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1671 bytes)
	I0704 01:08:43.035588 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0704 01:08:43.061964 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0704 01:08:43.086326 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0704 01:08:43.110608 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0704 01:08:43.134625 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0704 01:08:43.157943 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0704 01:08:43.181717 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0704 01:08:43.206975 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0704 01:08:43.224889 1196445 ssh_runner.go:195] Run: openssl version
	I0704 01:08:43.230415 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0704 01:08:43.239834 1196445 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243271 1196445 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Jul  4 01:08 /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243394 1196445 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.250079 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0704 01:08:43.259384 1196445 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0704 01:08:43.262831 1196445 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0704 01:08:43.262919 1196445 kubeadm.go:391] StartCluster: {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:43.263009 1196445 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0704 01:08:43.263066 1196445 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0704 01:08:43.303591 1196445 cri.go:89] found id: ""
	I0704 01:08:43.303662 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0704 01:08:43.312342 1196445 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0704 01:08:43.321279 1196445 kubeadm.go:213] ignoring SystemVerification for kubeadm because of docker driver
	I0704 01:08:43.321369 1196445 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0704 01:08:43.329958 1196445 kubeadm.go:154] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0704 01:08:43.330017 1196445 kubeadm.go:156] found existing configuration files:
	
	I0704 01:08:43.330077 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0704 01:08:43.338479 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0704 01:08:43.338539 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0704 01:08:43.346581 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0704 01:08:43.355511 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0704 01:08:43.355614 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0704 01:08:43.363887 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.372345 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0704 01:08:43.372436 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.380824 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0704 01:08:43.389491 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0704 01:08:43.389581 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0704 01:08:43.398028 1196445 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.30.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0704 01:08:43.442669 1196445 kubeadm.go:309] [init] Using Kubernetes version: v1.30.2
	I0704 01:08:43.442729 1196445 kubeadm.go:309] [preflight] Running pre-flight checks
	I0704 01:08:43.482986 1196445 kubeadm.go:309] [preflight] The system verification failed. Printing the output from the verification:
	I0704 01:08:43.483062 1196445 kubeadm.go:309] KERNEL_VERSION: 5.15.0-1064-aws
	I0704 01:08:43.483101 1196445 kubeadm.go:309] OS: Linux
	I0704 01:08:43.483149 1196445 kubeadm.go:309] CGROUPS_CPU: enabled
	I0704 01:08:43.483199 1196445 kubeadm.go:309] CGROUPS_CPUACCT: enabled
	I0704 01:08:43.483248 1196445 kubeadm.go:309] CGROUPS_CPUSET: enabled
	I0704 01:08:43.483314 1196445 kubeadm.go:309] CGROUPS_DEVICES: enabled
	I0704 01:08:43.483364 1196445 kubeadm.go:309] CGROUPS_FREEZER: enabled
	I0704 01:08:43.483415 1196445 kubeadm.go:309] CGROUPS_MEMORY: enabled
	I0704 01:08:43.483465 1196445 kubeadm.go:309] CGROUPS_PIDS: enabled
	I0704 01:08:43.483531 1196445 kubeadm.go:309] CGROUPS_HUGETLB: enabled
	I0704 01:08:43.483582 1196445 kubeadm.go:309] CGROUPS_BLKIO: enabled
	I0704 01:08:43.557602 1196445 kubeadm.go:309] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0704 01:08:43.557712 1196445 kubeadm.go:309] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0704 01:08:43.557806 1196445 kubeadm.go:309] [preflight] You can also perform this action in beforehand using 'kubeadm config images pull'
	I0704 01:08:43.820944 1196445 kubeadm.go:309] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0704 01:08:43.824525 1196445 out.go:204]   - Generating certificates and keys ...
	I0704 01:08:43.824630 1196445 kubeadm.go:309] [certs] Using existing ca certificate authority
	I0704 01:08:43.824724 1196445 kubeadm.go:309] [certs] Using existing apiserver certificate and key on disk
	I0704 01:08:44.229464 1196445 kubeadm.go:309] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0704 01:08:44.869561 1196445 kubeadm.go:309] [certs] Generating "front-proxy-ca" certificate and key
	I0704 01:08:44.995121 1196445 kubeadm.go:309] [certs] Generating "front-proxy-client" certificate and key
	I0704 01:08:45.945765 1196445 kubeadm.go:309] [certs] Generating "etcd/ca" certificate and key
	I0704 01:08:46.426406 1196445 kubeadm.go:309] [certs] Generating "etcd/server" certificate and key
	I0704 01:08:46.426727 1196445 kubeadm.go:309] [certs] etcd/server serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.568753 1196445 kubeadm.go:309] [certs] Generating "etcd/peer" certificate and key
	I0704 01:08:46.569065 1196445 kubeadm.go:309] [certs] etcd/peer serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.824637 1196445 kubeadm.go:309] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0704 01:08:47.178188 1196445 kubeadm.go:309] [certs] Generating "apiserver-etcd-client" certificate and key
	I0704 01:08:47.938770 1196445 kubeadm.go:309] [certs] Generating "sa" key and public key
	I0704 01:08:47.939012 1196445 kubeadm.go:309] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0704 01:08:48.532992 1196445 kubeadm.go:309] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0704 01:08:48.700658 1196445 kubeadm.go:309] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0704 01:08:49.343140 1196445 kubeadm.go:309] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0704 01:08:50.301229 1196445 kubeadm.go:309] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0704 01:08:50.970394 1196445 kubeadm.go:309] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0704 01:08:50.971331 1196445 kubeadm.go:309] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0704 01:08:50.976216 1196445 kubeadm.go:309] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0704 01:08:50.978409 1196445 out.go:204]   - Booting up control plane ...
	I0704 01:08:50.978513 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0704 01:08:50.978594 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0704 01:08:50.979204 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0704 01:08:50.989631 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0704 01:08:50.991151 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0704 01:08:50.991199 1196445 kubeadm.go:309] [kubelet-start] Starting the kubelet
	I0704 01:08:51.101564 1196445 kubeadm.go:309] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0704 01:08:51.101665 1196445 kubeadm.go:309] [kubelet-check] Waiting for a healthy kubelet. This can take up to 4m0s
	I0704 01:08:52.602456 1196445 kubeadm.go:309] [kubelet-check] The kubelet is healthy after 1.501248538s
	I0704 01:08:52.602548 1196445 kubeadm.go:309] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0704 01:08:59.104335 1196445 kubeadm.go:309] [api-check] The API server is healthy after 6.50185707s
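The [api-check] wait is, in essence, a poll of the apiserver's health endpoint until it answers or the 4m0s deadline passes. One plausible implementation as a hedged Go sketch (endpoint, port, and timeout assumed from this log, not taken from kubeadm's source):

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 2 * time.Second,
		// The apiserver's serving cert is not in the host trust store here.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	deadline := time.Now().Add(4 * time.Minute) // "can take up to 4m0s"
	for time.Now().Before(deadline) {
		resp, err := client.Get("https://192.168.49.2:8443/healthz")
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				fmt.Println("API server is healthy")
				return
			}
		}
		time.Sleep(500 * time.Millisecond)
	}
	fmt.Println("timed out waiting for a healthy API server")
}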
	I0704 01:08:59.129403 1196445 kubeadm.go:309] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0704 01:08:59.143665 1196445 kubeadm.go:309] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0704 01:08:59.169656 1196445 kubeadm.go:309] [upload-certs] Skipping phase. Please see --upload-certs
	I0704 01:08:59.169845 1196445 kubeadm.go:309] [mark-control-plane] Marking the node addons-155517 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0704 01:08:59.181202 1196445 kubeadm.go:309] [bootstrap-token] Using token: 7nkvt2.ozp2nts9dnthdvog
	I0704 01:08:59.182924 1196445 out.go:204]   - Configuring RBAC rules ...
	I0704 01:08:59.183052 1196445 kubeadm.go:309] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0704 01:08:59.189973 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0704 01:08:59.198030 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0704 01:08:59.202188 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller to automatically approve CSRs from a Node Bootstrap Token
	I0704 01:08:59.205935 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0704 01:08:59.209870 1196445 kubeadm.go:309] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0704 01:08:59.517980 1196445 kubeadm.go:309] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0704 01:08:59.966077 1196445 kubeadm.go:309] [addons] Applied essential addon: CoreDNS
	I0704 01:09:00.515905 1196445 kubeadm.go:309] [addons] Applied essential addon: kube-proxy
	I0704 01:09:00.516982 1196445 kubeadm.go:309] 
	I0704 01:09:00.517083 1196445 kubeadm.go:309] Your Kubernetes control-plane has initialized successfully!
	I0704 01:09:00.517098 1196445 kubeadm.go:309] 
	I0704 01:09:00.517185 1196445 kubeadm.go:309] To start using your cluster, you need to run the following as a regular user:
	I0704 01:09:00.517190 1196445 kubeadm.go:309] 
	I0704 01:09:00.517215 1196445 kubeadm.go:309]   mkdir -p $HOME/.kube
	I0704 01:09:00.517272 1196445 kubeadm.go:309]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0704 01:09:00.517328 1196445 kubeadm.go:309]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0704 01:09:00.517334 1196445 kubeadm.go:309] 
	I0704 01:09:00.517385 1196445 kubeadm.go:309] Alternatively, if you are the root user, you can run:
	I0704 01:09:00.517390 1196445 kubeadm.go:309] 
	I0704 01:09:00.517436 1196445 kubeadm.go:309]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0704 01:09:00.517442 1196445 kubeadm.go:309] 
	I0704 01:09:00.517492 1196445 kubeadm.go:309] You should now deploy a pod network to the cluster.
	I0704 01:09:00.517564 1196445 kubeadm.go:309] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0704 01:09:00.517632 1196445 kubeadm.go:309]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0704 01:09:00.517637 1196445 kubeadm.go:309] 
	I0704 01:09:00.517719 1196445 kubeadm.go:309] You can now join any number of control-plane nodes by copying certificate authorities
	I0704 01:09:00.517792 1196445 kubeadm.go:309] and service account keys on each node and then running the following as root:
	I0704 01:09:00.517797 1196445 kubeadm.go:309] 
	I0704 01:09:00.517884 1196445 kubeadm.go:309]   kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.517985 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa \
	I0704 01:09:00.518011 1196445 kubeadm.go:309] 	--control-plane 
	I0704 01:09:00.518017 1196445 kubeadm.go:309] 
	I0704 01:09:00.518099 1196445 kubeadm.go:309] Then you can join any number of worker nodes by running the following on each as root:
	I0704 01:09:00.518104 1196445 kubeadm.go:309] 
	I0704 01:09:00.518183 1196445 kubeadm.go:309] kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.518281 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa 
	I0704 01:09:00.520643 1196445 kubeadm.go:309] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1064-aws\n", err: exit status 1
	I0704 01:09:00.520759 1196445 kubeadm.go:309] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0704 01:09:00.520778 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:09:00.520793 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:09:00.523121 1196445 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0704 01:09:00.525213 1196445 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0704 01:09:00.530132 1196445 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.30.2/kubectl ...
	I0704 01:09:00.530152 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2438 bytes)
	I0704 01:09:00.549314 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
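Note: the 2438-byte manifest applied here is minikube's bundled kindnet CNI. To confirm the CNI actually came up before the addon verification further down, something like the following should work (the DaemonSet name kindnet is an assumption about the bundled manifest, not read from this log):

	kubectl -n kube-system rollout status daemonset kindnet --timeout=60s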
	I0704 01:09:00.814623 1196445 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0704 01:09:00.814727 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:00.814782 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-155517 minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700 minikube.k8s.io/version=v1.33.1 minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8 minikube.k8s.io/name=addons-155517 minikube.k8s.io/primary=true
	I0704 01:09:00.972427 1196445 ops.go:34] apiserver oom_adj: -16
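Note: the -16 read back from /proc/$(pgrep kube-apiserver)/oom_adj is on the legacy OOM scale (-17..15) and strongly biases the kernel against OOM-killing the apiserver. The same check via the current interface:

	# oom_score_adj is the modern knob, range -1000..1000
	cat /proc/$(pgrep kube-apiserver)/oom_score_adj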
	I0704 01:09:00.972641 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.472756 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.972695 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.472885 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.973558 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.473435 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.973670 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.473303 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.973640 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.473357 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.973493 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.473432 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.972686 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.472874 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.973677 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.472770 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.973060 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.473021 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.972794 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.472869 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.973078 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.473672 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.972769 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.473575 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.972752 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.473123 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.973407 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.472957 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.571895 1196445 kubeadm.go:1107] duration metric: took 13.757267057s to wait for elevateKubeSystemPrivileges
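Note: the burst of identical `kubectl get sa default` runs above is minikube polling roughly every 500ms until the default ServiceAccount exists; elevateKubeSystemPrivileges (which also created the minikube-rbac ClusterRoleBinding at 01:09:00.814727) returns once it does, 13.76s later here. A hedged standalone version of that wait, with the kubeconfig path as in the log:

	until sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig \
	    get sa default >/dev/null 2>&1; do sleep 0.5; done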
	W0704 01:09:14.571932 1196445 kubeadm.go:286] apiserver tunnel failed: apiserver port not set
	I0704 01:09:14.571940 1196445 kubeadm.go:393] duration metric: took 31.309027137s to StartCluster
	I0704 01:09:14.571956 1196445 settings.go:142] acquiring lock: {Name:mk6d49b718ddc65478a80e50434df6064c31eee4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572073 1196445 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:09:14.572478 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572667 1196445 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:09:14.572807 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0704 01:09:14.573070 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.573109 1196445 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
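Note: the toEnable map above is this test profile's addon selection; the same set is user-controllable per profile with the ordinary minikube CLI, e.g.:

	minikube -p addons-155517 addons list
	minikube -p addons-155517 addons enable metrics-server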
	I0704 01:09:14.573210 1196445 addons.go:69] Setting yakd=true in profile "addons-155517"
	I0704 01:09:14.573236 1196445 addons.go:234] Setting addon yakd=true in "addons-155517"
	I0704 01:09:14.573263 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.573732 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574149 1196445 addons.go:69] Setting metrics-server=true in profile "addons-155517"
	I0704 01:09:14.574180 1196445 addons.go:234] Setting addon metrics-server=true in "addons-155517"
	I0704 01:09:14.574206 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574253 1196445 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-155517"
	I0704 01:09:14.574275 1196445 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-155517"
	I0704 01:09:14.574297 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574634 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574730 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.577258 1196445 addons.go:69] Setting registry=true in profile "addons-155517"
	I0704 01:09:14.577394 1196445 addons.go:234] Setting addon registry=true in "addons-155517"
	I0704 01:09:14.577449 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.579535 1196445 addons.go:69] Setting storage-provisioner=true in profile "addons-155517"
	I0704 01:09:14.579602 1196445 addons.go:234] Setting addon storage-provisioner=true in "addons-155517"
	I0704 01:09:14.579640 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.580179 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580252 1196445 addons.go:69] Setting cloud-spanner=true in profile "addons-155517"
	I0704 01:09:14.580300 1196445 addons.go:234] Setting addon cloud-spanner=true in "addons-155517"
	I0704 01:09:14.591703 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.592237 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580425 1196445 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-155517"
	I0704 01:09:14.597313 1196445 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:14.597353 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.597793 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580434 1196445 addons.go:69] Setting default-storageclass=true in profile "addons-155517"
	I0704 01:09:14.598119 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-155517"
	I0704 01:09:14.598447 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580438 1196445 addons.go:69] Setting gcp-auth=true in profile "addons-155517"
	I0704 01:09:14.618488 1196445 mustload.go:65] Loading cluster: addons-155517
	I0704 01:09:14.618673 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.618933 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580441 1196445 addons.go:69] Setting ingress=true in profile "addons-155517"
	I0704 01:09:14.633852 1196445 addons.go:234] Setting addon ingress=true in "addons-155517"
	I0704 01:09:14.633911 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.634367 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580444 1196445 addons.go:69] Setting ingress-dns=true in profile "addons-155517"
	I0704 01:09:14.659280 1196445 addons.go:234] Setting addon ingress-dns=true in "addons-155517"
	I0704 01:09:14.659332 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.659837 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580448 1196445 addons.go:69] Setting inspektor-gadget=true in profile "addons-155517"
	I0704 01:09:14.683730 1196445 addons.go:234] Setting addon inspektor-gadget=true in "addons-155517"
	I0704 01:09:14.683777 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.684227 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.698704 1196445 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.15.1
	I0704 01:09:14.701274 1196445 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.17
	I0704 01:09:14.580702 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.719181 1196445 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0704 01:09:14.580712 1196445 out.go:177] * Verifying Kubernetes components...
	I0704 01:09:14.721032 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0704 01:09:14.721051 1196445 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0704 01:09:14.721122 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.580911 1196445 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-155517"
	I0704 01:09:14.580918 1196445 addons.go:69] Setting volcano=true in profile "addons-155517"
	I0704 01:09:14.723014 1196445 addons.go:234] Setting addon volcano=true in "addons-155517"
	I0704 01:09:14.580922 1196445 addons.go:69] Setting volumesnapshots=true in profile "addons-155517"
	I0704 01:09:14.723103 1196445 addons.go:234] Setting addon volumesnapshots=true in "addons-155517"
	I0704 01:09:14.723131 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.723415 1196445 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:14.723429 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0704 01:09:14.723497 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.729712 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-155517"
	I0704 01:09:14.730053 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.760655 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:09:14.760879 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.761414 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.767033 1196445 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:14.767054 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0704 01:09:14.767114 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.808186 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.808661 1196445 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.1
	I0704 01:09:14.811351 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0704 01:09:14.811385 1196445 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0704 01:09:14.811464 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.828255 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0704 01:09:14.830324 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.854527 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.2
	I0704 01:09:14.854766 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:14.855575 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0704 01:09:14.855680 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.858854 1196445 addons.go:234] Setting addon default-storageclass=true in "addons-155517"
	I0704 01:09:14.858894 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.859317 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.876557 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.884711 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0704 01:09:14.887689 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:14.887713 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0704 01:09:14.887776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.907776 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.921547 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0704 01:09:14.946618 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.10.1
	I0704 01:09:14.952851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
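Note: the Port:33941 in these ssh clients is the host port Docker mapped to the node container's sshd; the repeated `docker container inspect -f` template above resolves it. The short-form equivalent:

	docker port addons-155517 22
	# expected output (illustrative): 127.0.0.1:33941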
	I0704 01:09:14.958655 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:14.958677 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0704 01:09:14.958757 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.989105 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0704 01:09:14.998292 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0704 01:09:15.008457 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.010183 1196445 out.go:177]   - Using image docker.io/registry:2.8.3
	I0704 01:09:15.011903 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0704 01:09:15.016345 1196445 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-155517"
	I0704 01:09:15.016398 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:15.016856 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:15.017248 1196445 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.30.0
	I0704 01:09:15.026296 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0704 01:09:15.032293 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0704 01:09:15.033528 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.034311 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0704 01:09:15.036144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0704 01:09:15.036176 1196445 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0704 01:09:15.036273 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.040125 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0704 01:09:15.042262 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0704 01:09:15.042363 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0704 01:09:15.044439 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0704 01:09:15.044460 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0704 01:09:15.044547 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.052699 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0704 01:09:15.052768 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (798 bytes)
	I0704 01:09:15.052855 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.066354 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0704 01:09:15.068850 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0704 01:09:15.075588 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0704 01:09:15.075620 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0704 01:09:15.075727 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.088431 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0704 01:09:15.114822 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.115791 1196445 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:15.115824 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (442770 bytes)
	I0704 01:09:15.115909 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.121303 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.123349 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.160899 1196445 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:15.160920 1196445 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0704 01:09:15.160989 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.162700 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.243792 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.254669 1196445 out.go:177]   - Using image docker.io/busybox:stable
	I0704 01:09:15.256265 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257205 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257463 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.259664 1196445 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0704 01:09:15.261691 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.261712 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0704 01:09:15.261776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.265635 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	W0704 01:09:15.288056 1196445 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0704 01:09:15.288087 1196445 retry.go:31] will retry after 347.093048ms: ssh: handshake failed: EOF
	I0704 01:09:15.307720 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.310433 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.322864 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:09:15.588350 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0704 01:09:15.588385 1196445 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0704 01:09:15.621674 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0704 01:09:15.621748 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0704 01:09:15.749570 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0704 01:09:15.749601 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0704 01:09:15.934643 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.971372 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:16.022036 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.022108 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0704 01:09:16.110981 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0704 01:09:16.111009 1196445 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0704 01:09:16.121017 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0704 01:09:16.121043 1196445 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0704 01:09:16.190720 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:16.201668 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:16.213599 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:16.238689 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0704 01:09:16.238718 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0704 01:09:16.244065 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:16.263426 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:16.295999 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0704 01:09:16.296072 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0704 01:09:16.307564 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.362987 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:16.383918 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0704 01:09:16.383945 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0704 01:09:16.394076 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0704 01:09:16.394102 1196445 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0704 01:09:16.417925 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.417949 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0704 01:09:16.422065 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0704 01:09:16.422088 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0704 01:09:16.522801 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0704 01:09:16.522826 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0704 01:09:16.575145 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0704 01:09:16.575171 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0704 01:09:16.653193 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.653217 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0704 01:09:16.659744 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0704 01:09:16.659770 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0704 01:09:16.661739 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.744662 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0704 01:09:16.744688 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0704 01:09:16.821862 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0704 01:09:16.821888 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0704 01:09:16.881062 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.901455 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0704 01:09:16.901481 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0704 01:09:16.928474 1196445 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.939327122s)
	I0704 01:09:16.928503 1196445 start.go:967] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
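Note: the sed pipeline that just completed (started at 01:09:14.989105) rewrites the coredns ConfigMap in place, inserting a log directive before errors and a hosts block before the forward plugin. After the replace, the Corefile fragment should read approximately:

	        log
	        errors
	        ...
	        hosts {
	           192.168.49.1 host.minikube.internal
	           fallthrough
	        }
	        forward . /etc/resolv.conf

This can be checked with `kubectl -n kube-system get configmap coredns -o yaml`.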
	I0704 01:09:16.929502 1196445 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.606615135s)
	I0704 01:09:16.930207 1196445 node_ready.go:35] waiting up to 6m0s for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937817 1196445 node_ready.go:49] node "addons-155517" has status "Ready":"True"
	I0704 01:09:16.937845 1196445 node_ready.go:38] duration metric: took 7.604392ms for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937855 1196445 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0704 01:09:16.962597 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:17.098912 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0704 01:09:17.098985 1196445 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0704 01:09:17.131539 1196445 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.131609 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0704 01:09:17.201528 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0704 01:09:17.201552 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0704 01:09:17.281563 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0704 01:09:17.281585 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0704 01:09:17.383540 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0704 01:09:17.383612 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0704 01:09:17.431613 1196445 kapi.go:248] "coredns" deployment in "kube-system" namespace and "addons-155517" context rescaled to 1 replica
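Note: this rescale is the standard single-node adjustment (a second CoreDNS replica adds nothing on a one-node cluster). The equivalent imperative command:

	kubectl -n kube-system scale deployment coredns --replicas=1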
	I0704 01:09:17.491254 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.630881 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0704 01:09:17.630958 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0704 01:09:17.673774 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0704 01:09:17.673845 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0704 01:09:17.747064 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0704 01:09:17.747141 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0704 01:09:17.825561 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0704 01:09:17.825630 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0704 01:09:17.839617 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:17.839687 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0704 01:09:18.021499 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0704 01:09:18.021581 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0704 01:09:18.066416 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:18.103144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.103227 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0704 01:09:18.206551 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.972681 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:19.481076 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (3.546393824s)
	I0704 01:09:19.481267 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (3.509817533s)
	I0704 01:09:19.707411 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.516628882s)
	I0704 01:09:21.018313 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:22.094516 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0704 01:09:22.094602 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.120851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:22.498250 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0704 01:09:22.672494 1196445 addons.go:234] Setting addon gcp-auth=true in "addons-155517"
	I0704 01:09:22.672553 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:22.672999 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:22.693321 1196445 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0704 01:09:22.693374 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.731686 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:23.219625 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (7.017917581s)
	I0704 01:09:23.219660 1196445 addons.go:475] Verifying addon ingress=true in "addons-155517"
	I0704 01:09:23.219801 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (7.006176358s)
	I0704 01:09:23.223048 1196445 out.go:177] * Verifying ingress addon...
	I0704 01:09:23.225816 1196445 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0704 01:09:23.233823 1196445 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0704 01:09:23.233848 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
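Note: the kapi.go polling that begins here watches pods by label until none are Pending. A direct way to reproduce the same wait by hand, using the selector from the log line above:

	kubectl -n ingress-nginx get pods -l app.kubernetes.io/name=ingress-nginx
	kubectl -n ingress-nginx wait --for=condition=Ready pod \
	  -l app.kubernetes.io/name=ingress-nginx --timeout=120s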
	I0704 01:09:23.469592 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:23.733369 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.252870 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.738392 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.197716 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (8.95361222s)
	I0704 01:09:25.197838 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (8.934389448s)
	I0704 01:09:25.197945 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.890302325s)
	I0704 01:09:25.197981 1196445 addons.go:475] Verifying addon metrics-server=true in "addons-155517"
	I0704 01:09:25.198051 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (8.835024851s)
	I0704 01:09:25.198102 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.536339738s)
	I0704 01:09:25.198137 1196445 addons.go:475] Verifying addon registry=true in "addons-155517"
	I0704 01:09:25.198304 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.317214693s)
	I0704 01:09:25.198380 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.707056177s)
	W0704 01:09:25.199807 1196445 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0704 01:09:25.199830 1196445 retry.go:31] will retry after 346.398491ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
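Note: this retry is an ordering race rather than a real failure: the apply submits the VolumeSnapshotClass in the same batch as the CRDs that define it, and the API server has not yet established the new types (hence "ensure CRDs are installed first"). minikube retries after ~350ms, issuing the apply again with --force below. To rule out a genuine CRD problem one could wait for establishment explicitly:

	kubectl wait --for=condition=Established \
	  crd/volumesnapshotclasses.snapshot.storage.k8s.io --timeout=60s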
	I0704 01:09:25.198438 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.131941092s)
	I0704 01:09:25.201793 1196445 out.go:177] * Verifying registry addon...
	I0704 01:09:25.201794 1196445 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-155517 service yakd-dashboard -n yakd-dashboard
	
	I0704 01:09:25.204942 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0704 01:09:25.222617 1196445 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0704 01:09:25.222690 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.251428 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.492382 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:25.546710 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:25.735322 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.736772 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.911009 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.704355252s)
	I0704 01:09:25.911047 1196445 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:25.911278 1196445 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.217934924s)
	I0704 01:09:25.914361 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:25.914416 1196445 out.go:177] * Verifying csi-hostpath-driver addon...
	I0704 01:09:25.916249 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0704 01:09:25.917135 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0704 01:09:25.918627 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0704 01:09:25.918653 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0704 01:09:25.925376 1196445 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0704 01:09:25.925401 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:25.957453 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0704 01:09:25.957479 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0704 01:09:25.985833 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:25.985857 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0704 01:09:26.010928 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:26.210069 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.232731 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.424161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:26.710162 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.731168 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.923170 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.210427 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.230934 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.250410 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (1.703595614s)
	I0704 01:09:27.250521 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.23956747s)
	I0704 01:09:27.253393 1196445 addons.go:475] Verifying addon gcp-auth=true in "addons-155517"
	I0704 01:09:27.257553 1196445 out.go:177] * Verifying gcp-auth addon...
	I0704 01:09:27.260382 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0704 01:09:27.262998 1196445 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:09:27.424027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.710878 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.730370 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.923796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.969295 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:28.211580 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.231431 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.422656 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:28.711283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.730044 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.924689 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.211847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.231572 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.425562 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.710139 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.731027 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.210230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.231858 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.424018 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.469196 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:30.709718 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.730263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.922825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.219839 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.241048 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.425597 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.712545 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.731465 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.922640 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.210366 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.230954 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.422551 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.710061 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.730153 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.925535 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.968875 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:33.209869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.230720 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.423400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:33.710614 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.730154 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.923184 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.211208 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.230331 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.422808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.710269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.730201 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.923588 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.969882 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:35.210283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.230873 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.422763 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:35.709465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.730270 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.923232 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.209479 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.229850 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.423197 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.710129 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.730676 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.922710 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.209525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.230493 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.424298 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.468432 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:37.710143 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.730780 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.922415 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.209676 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.230719 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.422303 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.710124 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.730126 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.922826 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.210085 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.230519 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.423189 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.468806 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:39.710065 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.730167 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.923743 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.209304 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.422186 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.710013 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.730219 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.923123 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.211576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.237098 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.423541 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.469660 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:41.710859 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.731203 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.923394 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.210467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.231587 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.423307 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.709772 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.729958 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.923400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.210064 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.230380 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.423544 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.709869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.730334 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.925613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.976063 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:44.216521 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.233500 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.423984 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:44.709890 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.730491 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.922620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.218269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.232042 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.709814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.730412 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.924578 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.210332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.231072 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.424267 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.482456 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:46.709782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.730358 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.925109 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.210160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.230642 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.422979 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.710273 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.730748 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.937531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.210020 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.230451 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.423194 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.709898 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.732172 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.923290 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.969889 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:49.212283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.230721 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.423247 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:49.710414 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.730459 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.922796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.210560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.233100 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.423363 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.710514 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.733774 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.922714 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.211285 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.423332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.472262 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:51.710160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.730450 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.925154 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.231397 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.423658 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.710851 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.730326 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.212484 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.232166 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.424097 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.710891 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.731740 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.924019 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.968656 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:54.211161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.232059 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.423259 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:54.712488 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.731251 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.924094 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.210510 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.231539 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.422966 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.711791 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.733623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.930765 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.969815 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:56.210040 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.230481 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.423900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:56.717782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.733051 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.924352 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.210192 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.231623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.423375 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.712974 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.739221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.924312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.210188 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.231084 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.428301 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.469048 1196445 pod_ready.go:92] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.469072 1196445 pod_ready.go:81] duration metric: took 41.506375249s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.469083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.470959 1196445 pod_ready.go:97] error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
	I0704 01:09:58.470985 1196445 pod_ready.go:81] duration metric: took 1.892793ms for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	E0704 01:09:58.470996 1196445 pod_ready.go:66] WaitExtra: waitPodCondition: error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
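
The coredns-7db6d8ff4d-kd68p errors just above are benign: that replica was apparently removed while the wait was in flight, so Get returns NotFound and the wait marks the pod as skipped rather than failing the run. A sketch of that branch under the same client-go assumptions as the earlier example (podReadyOrGone is a hypothetical name):

	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		apierrors "k8s.io/apimachinery/pkg/api/errors"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// podReadyOrGone treats a deleted pod as done rather than as an error,
	// mirroring the "(skipping!)" lines in the log.
	func podReadyOrGone(cs *kubernetes.Clientset, ns, name string) (bool, error) {
		pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if apierrors.IsNotFound(err) {
			return true, nil // replica was replaced or deleted: skip it
		}
		if err != nil {
			return false, err
		}
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue, nil
			}
		}
		return false, nil
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		fmt.Println(podReadyOrGone(cs, "kube-system", "coredns-7db6d8ff4d-kd68p"))
	}
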
	I0704 01:09:58.471003 1196445 pod_ready.go:78] waiting up to 6m0s for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476539 1196445 pod_ready.go:92] pod "etcd-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.476565 1196445 pod_ready.go:81] duration metric: took 5.554425ms for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476580 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482116 1196445 pod_ready.go:92] pod "kube-apiserver-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.482141 1196445 pod_ready.go:81] duration metric: took 5.552694ms for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482153 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488450 1196445 pod_ready.go:92] pod "kube-controller-manager-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.488476 1196445 pod_ready.go:81] duration metric: took 6.314983ms for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488488 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667045 1196445 pod_ready.go:92] pod "kube-proxy-62r6j" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.667072 1196445 pod_ready.go:81] duration metric: took 178.576255ms for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.709864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.730495 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.923133 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.067974 1196445 pod_ready.go:92] pod "kube-scheduler-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:59.068046 1196445 pod_ready.go:81] duration metric: took 400.954049ms for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:59.068072 1196445 pod_ready.go:38] duration metric: took 42.130204788s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0704 01:09:59.068115 1196445 api_server.go:52] waiting for apiserver process to appear ...
	I0704 01:09:59.068215 1196445 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 01:09:59.083368 1196445 api_server.go:72] duration metric: took 44.510664696s to wait for apiserver process to appear ...
	I0704 01:09:59.083466 1196445 api_server.go:88] waiting for apiserver healthz status ...
	I0704 01:09:59.083524 1196445 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0704 01:09:59.092379 1196445 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0704 01:09:59.093665 1196445 api_server.go:141] control plane version: v1.30.2
	I0704 01:09:59.093689 1196445 api_server.go:131] duration metric: took 10.180723ms to wait for apiserver health ...
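
After the kube-apiserver process check via pgrep, readiness is confirmed over HTTP: /healthz on https://192.168.49.2:8443 answers 200 with body "ok", as logged above. A standalone sketch of such a probe (TLS verification is skipped because the apiserver certificate is signed by the cluster's own CA; acceptable in a throwaway test harness, never in production):

	package main

	import (
		"crypto/tls"
		"fmt"
		"io"
		"net/http"
		"time"
	)

	func main() {
		client := &http.Client{
			Timeout: 5 * time.Second,
			Transport: &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
			},
		}
		// Poll healthz until it answers 200, as the log's healthz wait does.
		for i := 0; i < 30; i++ {
			resp, err := client.Get("https://192.168.49.2:8443/healthz")
			if err == nil {
				body, _ := io.ReadAll(resp.Body)
				resp.Body.Close()
				if resp.StatusCode == http.StatusOK {
					fmt.Printf("healthz returned %d: %s\n", resp.StatusCode, body)
					return
				}
			}
			time.Sleep(time.Second)
		}
		fmt.Println("apiserver never became healthy")
	}
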
	I0704 01:09:59.093697 1196445 system_pods.go:43] waiting for kube-system pods to appear ...
	I0704 01:09:59.210881 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.230670 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.277516 1196445 system_pods.go:59] 18 kube-system pods found
	I0704 01:09:59.277591 1196445 system_pods.go:61] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.277614 1196445 system_pods.go:61] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.277638 1196445 system_pods.go:61] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.277674 1196445 system_pods.go:61] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.277700 1196445 system_pods.go:61] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.277723 1196445 system_pods.go:61] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.277743 1196445 system_pods.go:61] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.277782 1196445 system_pods.go:61] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.277811 1196445 system_pods.go:61] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.277834 1196445 system_pods.go:61] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.277855 1196445 system_pods.go:61] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.277890 1196445 system_pods.go:61] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.277919 1196445 system_pods.go:61] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.277940 1196445 system_pods.go:61] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.277965 1196445 system_pods.go:61] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.278000 1196445 system_pods.go:61] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.278027 1196445 system_pods.go:61] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.278049 1196445 system_pods.go:61] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.278072 1196445 system_pods.go:74] duration metric: took 184.367763ms to wait for pod list to return data ...
	I0704 01:09:59.278106 1196445 default_sa.go:34] waiting for default service account to be created ...
	I0704 01:09:59.423626 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.467163 1196445 default_sa.go:45] found service account: "default"
	I0704 01:09:59.467239 1196445 default_sa.go:55] duration metric: took 189.106033ms for default service account to be created ...
	I0704 01:09:59.467265 1196445 system_pods.go:116] waiting for k8s-apps to be running ...
	I0704 01:09:59.676387 1196445 system_pods.go:86] 18 kube-system pods found
	I0704 01:09:59.676469 1196445 system_pods.go:89] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.676493 1196445 system_pods.go:89] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.676519 1196445 system_pods.go:89] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.676561 1196445 system_pods.go:89] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.676582 1196445 system_pods.go:89] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.676604 1196445 system_pods.go:89] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.676642 1196445 system_pods.go:89] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.676667 1196445 system_pods.go:89] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.676692 1196445 system_pods.go:89] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.676714 1196445 system_pods.go:89] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.676748 1196445 system_pods.go:89] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.677464 1196445 system_pods.go:89] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.677489 1196445 system_pods.go:89] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.677516 1196445 system_pods.go:89] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.677555 1196445 system_pods.go:89] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.677573 1196445 system_pods.go:89] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.677594 1196445 system_pods.go:89] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.677626 1196445 system_pods.go:89] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.677655 1196445 system_pods.go:126] duration metric: took 210.368801ms to wait for k8s-apps to be running ...
	I0704 01:09:59.677675 1196445 system_svc.go:44] waiting for kubelet service to be running ....
	I0704 01:09:59.677762 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:09:59.696237 1196445 system_svc.go:56] duration metric: took 18.553501ms WaitForService to wait for kubelet
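
The kubelet check above leans on systemd's exit-code contract: `systemctl is-active --quiet` prints nothing and reports state purely through its exit status, 0 for active and non-zero for anything else (the log's invocation carries an extra "service" token; the plain form is shown here). A stdlib sketch:

	package main

	import (
		"fmt"
		"os/exec"
	)

	func main() {
		// Exit code 0 = unit active; any non-zero status surfaces as a
		// non-nil error from Run, so no output parsing is needed.
		if err := exec.Command("sudo", "systemctl", "is-active", "--quiet", "kubelet").Run(); err != nil {
			fmt.Println("kubelet is not active:", err)
			return
		}
		fmt.Println("kubelet is active")
	}
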
	I0704 01:09:59.696315 1196445 kubeadm.go:576] duration metric: took 45.12361654s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 01:09:59.696350 1196445 node_conditions.go:102] verifying NodePressure condition ...
	I0704 01:09:59.714261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.736009 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.867078 1196445 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0704 01:09:59.867113 1196445 node_conditions.go:123] node cpu capacity is 2
	I0704 01:09:59.867127 1196445 node_conditions.go:105] duration metric: took 170.755162ms to run NodePressure ...
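
The NodePressure verification reads the node's capacity fields: the 203034800Ki of ephemeral storage and 2 CPUs above correspond to status.capacity on the Node object. A client-go sketch that prints those same two values, under the same kubeconfig assumption as the earlier sketches:

	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			// Capacity is a map of resource name -> quantity; the capacities
			// the log prints correspond to these two keys.
			cpu := n.Status.Capacity[corev1.ResourceCPU]
			eph := n.Status.Capacity[corev1.ResourceEphemeralStorage]
			fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name, cpu.String(), eph.String())
		}
	}
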
	I0704 01:09:59.867149 1196445 start.go:241] waiting for startup goroutines ...
	I0704 01:09:59.923389 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.218625 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.241517 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.438305 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.710808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.730534 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.926440 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.232796 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.429002 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.710041 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.730869 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.924722 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.210497 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.231671 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.438549 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.710555 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.735794 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.924538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.210613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.231512 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.424944 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.711675 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.733187 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.929527 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.209595 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.427465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.710195 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.731186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.924972 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.211604 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.231389 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.711411 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.734962 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.923586 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.210512 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.231004 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.423318 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.711560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.730836 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.938452 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.210046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.230933 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.425316 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.710091 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.730834 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.922615 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.211590 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.230434 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.423538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.710046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.733862 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.924256 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.209864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.230148 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.423756 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.710673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.731199 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.925117 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.212967 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.232598 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.424087 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.713769 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.730323 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.923900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.210573 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.231728 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.425825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.709221 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.730266 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.923382 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.209500 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.230865 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.422855 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.709768 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.731470 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.923005 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.209436 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.230328 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.422933 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.709576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.730924 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.922558 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.210379 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.230443 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.423308 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.710027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.730518 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.922814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.211531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.230606 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.423172 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.711224 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.730357 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.923854 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.214100 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.236362 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.422969 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.709735 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.730186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.924010 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.209861 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.230221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.429620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.711216 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.731288 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.923272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.210800 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.423721 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.709748 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.730911 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.924515 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.210261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.424525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.709993 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.732993 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.924083 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.210003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.230897 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.423785 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.711057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.730952 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.924157 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.210312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.230586 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.423795 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.710239 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.732286 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.926006 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.210368 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:22.230949 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.423210 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.709928 1196445 kapi.go:107] duration metric: took 57.504984289s to wait for kubernetes.io/minikube-addons=registry ...
	I0704 01:10:22.731279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.923850 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.230884 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.423811 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.730105 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.922926 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.229930 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.423940 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.733279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.923057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.232439 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.423165 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.731393 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.924116 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.230888 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.423217 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.730398 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.922554 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.231244 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.427320 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.732045 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.924673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.423152 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.730530 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.922455 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.231220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.423963 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.731263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.923847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.234736 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.424146 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.730525 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.230545 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.422889 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.730356 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.922685 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.230220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.423403 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.731689 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.230844 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:33.424895 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.734207 1196445 kapi.go:107] duration metric: took 1m10.508389549s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0704 01:10:33.927331 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.424003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.924230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.422420 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.922720 1196445 kapi.go:107] duration metric: took 1m10.005581993s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0704 01:10:50.263958 1196445 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:10:50.263985 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:50.764249 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.264467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.764892 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.264523 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.764246 1196445 kapi.go:107] duration metric: took 1m25.503859259s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0704 01:10:52.766320 1196445 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-155517 cluster.
	I0704 01:10:52.767887 1196445 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0704 01:10:52.769442 1196445 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0704 01:10:52.771171 1196445 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner-rancher, storage-provisioner, default-storageclass, volcano, cloud-spanner, metrics-server, ingress-dns, inspektor-gadget, yakd, volumesnapshots, registry, ingress, csi-hostpath-driver, gcp-auth
	I0704 01:10:52.772773 1196445 addons.go:510] duration metric: took 1m38.199659863s for enable addons: enabled=[nvidia-device-plugin storage-provisioner-rancher storage-provisioner default-storageclass volcano cloud-spanner metrics-server ingress-dns inspektor-gadget yakd volumesnapshots registry ingress csi-hostpath-driver gcp-auth]
	I0704 01:10:52.772819 1196445 start.go:246] waiting for cluster config update ...
	I0704 01:10:52.772852 1196445 start.go:255] writing updated cluster config ...
	I0704 01:10:52.773191 1196445 ssh_runner.go:195] Run: rm -f paused
	I0704 01:10:53.108848 1196445 start.go:600] kubectl: 1.30.2, cluster: 1.30.2 (minor skew: 0)
	I0704 01:10:53.111322 1196445 out.go:177] * Done! kubectl is now configured to use "addons-155517" cluster and "default" namespace by default
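	
	The `gcp-auth-skip-secret` opt-out mentioned in the messages above takes the form of a pod label. A minimal sketch, assuming the addon's mutating webhook only checks for the presence of the label key; the pod name and image below are hypothetical, for illustration only:
	
	  apiVersion: v1
	  kind: Pod
	  metadata:
	    name: no-gcp-creds               # hypothetical pod name
	    labels:
	      gcp-auth-skip-secret: "true"   # the label key is what matters; the value shown is arbitrary
	  spec:
	    containers:
	    - name: app
	      image: nginx                   # hypothetical image
	
	Per the output above, pods created without this label in the addons-155517 cluster receive the credential mount.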
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	ba40c8fe3b49d       dd1b12fcb6097       2 minutes ago       Exited              hello-world-app           11                  f656a264fb3c3       hello-world-app-86c47465fc-kcpm6
	044ce99825eb4       443d199e8bfcc       33 minutes ago      Exited              nginx                     0                   6b6375f3d6290       test-job-nginx-0
	c46f76128f4b3       5461b18aaccf3       33 minutes ago      Running             nginx                     0                   cc913652d669a       nginx
	1e595060faf3e       6cb7dcc2008fa       37 minutes ago      Running             headlamp                  0                   2bdbce3d0506c       headlamp-7867546754-xmxdd
	176e35400e6b4       6ef582f3ec844       37 minutes ago      Running             gcp-auth                  0                   ef5df83821e38       gcp-auth-5db96cd9b4-s44p2
	671aa72a09a1e       8b46b1cd48760       38 minutes ago      Running             admission                 0                   85a1b65a34b3a       volcano-admission-5f7844f7bc-kv4hh
	bcb962151c498       1505f556b3a7b       38 minutes ago      Running             volcano-controllers       0                   969b0063a7566       volcano-controllers-59cb4746db-b7bd8
	591fbae21d6c8       d1ca868ab82aa       38 minutes ago      Running             gadget                    2                   3d0e4ad5c6381       gadget-9pgwd
	e0f993f36de9c       d9c7ad4c226bf       38 minutes ago      Running             volcano-scheduler         0                   60ad9646728c0       volcano-scheduler-844f6db89b-bwtk7
	b600d65f7bbdc       77bdba588b953       38 minutes ago      Running             yakd                      0                   c7305ce05f616       yakd-dashboard-799879c74f-gdj2b
	57beab3f7e83e       2437cf7621777       38 minutes ago      Running             coredns                   0                   c1dabb0588375       coredns-7db6d8ff4d-5x2l7
	ae2716d367698       d1ca868ab82aa       38 minutes ago      Exited              gadget                    1                   3d0e4ad5c6381       gadget-9pgwd
	33899ba7d5911       ba04bb24b9575       38 minutes ago      Running             storage-provisioner       0                   b9a422421a145       storage-provisioner
	67cab13d85edc       89d73d416b992       38 minutes ago      Running             kindnet-cni               0                   d962f3914dbda       kindnet-7qr8x
	8a696cf44b3b1       66dbb96a9149f       38 minutes ago      Running             kube-proxy                0                   e274a6aef2e08       kube-proxy-62r6j
	d26768fcef3f9       e1dcc3400d3ea       39 minutes ago      Running             kube-controller-manager   0                   fad3a49558544       kube-controller-manager-addons-155517
	566fe4aca8adb       c7dd04b1bafeb       39 minutes ago      Running             kube-scheduler            0                   55af540582842       kube-scheduler-addons-155517
	d5678b588829b       84c601f3f72c8       39 minutes ago      Running             kube-apiserver            0                   6b54dc456af60       kube-apiserver-addons-155517
	f517c28ada419       014faa467e297       39 minutes ago      Running             etcd                      0                   edf3cfc1bc506       etcd-addons-155517
	
	
	==> containerd <==
	Jul 04 01:47:36 addons-155517 containerd[812]: time="2024-07-04T01:47:36.996072494Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"7d9d465ce4ea71ff6fe01bcca064fdeb3070a7eda49ca0680efaf4de5847f300\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:41 addons-155517 containerd[812]: time="2024-07-04T01:47:41.954285823Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"32a4288c6e6f827c0275815960a9e61143effe5242a8f9d8adf84f8dd06cfcb1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:41 addons-155517 containerd[812]: time="2024-07-04T01:47:41.971750574Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"e0392dde63c852de5ab46e7c315d883920d98a41db1b87af7d9c8c4f15793b54\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:41 addons-155517 containerd[812]: time="2024-07-04T01:47:41.989578337Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"b53b41d076c30da35a71b409003922b3fe5131262d77cf2ca5433decc80fd543\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:46 addons-155517 containerd[812]: time="2024-07-04T01:47:46.957322820Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"09f49646a933d0c9284e7d84b1fe97ef868037717df291e8714cc0feba14c2f7\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:46 addons-155517 containerd[812]: time="2024-07-04T01:47:46.972538144Z" level=error msg="ttrpc: received message on inactive stream" stream=41853
	Jul 04 01:47:46 addons-155517 containerd[812]: time="2024-07-04T01:47:46.973210548Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"ced7d4327173d7a5c66a24be47b6854c2d5f0729122b09e439c21227e3a4e55b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:46 addons-155517 containerd[812]: time="2024-07-04T01:47:46.988998880Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"337d184d93574455508fa55ec53347e213804c349bc0850f1f5beb5b299ea36e\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:51 addons-155517 containerd[812]: time="2024-07-04T01:47:51.956328560Z" level=error msg="ttrpc: received message on inactive stream" stream=41911
	Jul 04 01:47:51 addons-155517 containerd[812]: time="2024-07-04T01:47:51.957428106Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"a2670010e0cc7e0feb0bbb11c346eee03fc7078872e9252957c0fe98f4785adc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:51 addons-155517 containerd[812]: time="2024-07-04T01:47:51.972143707Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"d9b73eddc90f11e585a18df9844c560327a53e00171498dfdd71046f759b270e\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:51 addons-155517 containerd[812]: time="2024-07-04T01:47:51.988196485Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"92ad3762f8736517f2270992f8715cc43f5b417f21d6e63fb556997480df0dec\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:56 addons-155517 containerd[812]: time="2024-07-04T01:47:56.958196333Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"c9c541a95d56708691cf2ec4fcd930fdda4af7bddafd5748b6c6c297d4e756e6\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:56 addons-155517 containerd[812]: time="2024-07-04T01:47:56.982631353Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"629515ff3da8e89e51042374b60f8bdde9cddad0d16d41db3a6898d79212623b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:47:57 addons-155517 containerd[812]: time="2024-07-04T01:47:57.016072781Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"7ad8523bae3186a7b17b652015bbe2e3a4aab931b6196cc450f86aaab52178e1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:01 addons-155517 containerd[812]: time="2024-07-04T01:48:01.956744627Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"b721da210541696b1728e332a22e66092fd53c59c6410d24d6fec8fe1bbda10b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:01 addons-155517 containerd[812]: time="2024-07-04T01:48:01.972444207Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"7d4bbb3fc04f460cd0dd3ec4fd5956d338c5e586510b2d6b1cdd24137c2c2bf7\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:01 addons-155517 containerd[812]: time="2024-07-04T01:48:01.984601644Z" level=error msg="ttrpc: received message on inactive stream" stream=42135
	Jul 04 01:48:01 addons-155517 containerd[812]: time="2024-07-04T01:48:01.985283771Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"0505d507e296d33f44cbc780b86b149a2d6db793ba99c7c20375c236259cbfc3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:06 addons-155517 containerd[812]: time="2024-07-04T01:48:06.954263061Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"1492eb8d23e14493ff75a3d3be41f69ee269490a354abb1f19fd94f49a2c828b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:06 addons-155517 containerd[812]: time="2024-07-04T01:48:06.968559233Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"9d40d1befda15c3b466c946bdc8a148e6400460d8acc849d9325fcbb376a01dd\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:06 addons-155517 containerd[812]: time="2024-07-04T01:48:06.981215926Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"b40577cf1d1f68b4ce117a4b7956384a363841fcadc7bc55412fee4e0101c29b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:11 addons-155517 containerd[812]: time="2024-07-04T01:48:11.955626530Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"313306d3abb4f2d025c2082c7e741daa7818bbdf58a86d8404a94e112ff16203\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:11 addons-155517 containerd[812]: time="2024-07-04T01:48:11.969428338Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"174d0e08b879df5aece3faf447c8ed5620f4547415c4c300f25cbb21c471d3e5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:48:11 addons-155517 containerd[812]: time="2024-07-04T01:48:11.983573572Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"d8f2c69db1d36ab9c269785a6342f1dfde1f28c9730b107a2af6ca0aebbaa8b0\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	
	
	==> coredns [57beab3f7e83e507ce7e6fb884cd0d41c8e35bd5a2316cbae1f3e08b24e70f6c] <==
	[INFO] 10.244.0.21:59559 - 20477 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000068315s
	[INFO] 10.244.0.21:59559 - 60172 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001456116s
	[INFO] 10.244.0.21:36722 - 27412 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.0042309s
	[INFO] 10.244.0.21:36722 - 60285 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001097978s
	[INFO] 10.244.0.21:59559 - 61805 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.002556449s
	[INFO] 10.244.0.21:59559 - 16147 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000141486s
	[INFO] 10.244.0.21:36722 - 22461 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000064999s
	[INFO] 10.244.0.21:57285 - 59707 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000118414s
	[INFO] 10.244.0.21:45913 - 13762 "A IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000060487s
	[INFO] 10.244.0.21:57285 - 15512 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000055572s
	[INFO] 10.244.0.21:45913 - 40115 "AAAA IN hello-world-app.default.svc.cluster.local.ingress-nginx.svc.cluster.local. udp 91 false 512" NXDOMAIN qr,aa,rd 184 0.000077143s
	[INFO] 10.244.0.21:57285 - 45360 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000043921s
	[INFO] 10.244.0.21:45913 - 41249 "A IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000317515s
	[INFO] 10.244.0.21:57285 - 62178 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000053209s
	[INFO] 10.244.0.21:45913 - 14460 "AAAA IN hello-world-app.default.svc.cluster.local.svc.cluster.local. udp 77 false 512" NXDOMAIN qr,aa,rd 170 0.000093643s
	[INFO] 10.244.0.21:45913 - 54981 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000054686s
	[INFO] 10.244.0.21:57285 - 1562 "A IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.00009887s
	[INFO] 10.244.0.21:57285 - 58947 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.00007085s
	[INFO] 10.244.0.21:45913 - 40651 "AAAA IN hello-world-app.default.svc.cluster.local.cluster.local. udp 73 false 512" NXDOMAIN qr,aa,rd 166 0.000167766s
	[INFO] 10.244.0.21:57285 - 37296 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001666836s
	[INFO] 10.244.0.21:45913 - 32504 "A IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.001799518s
	[INFO] 10.244.0.21:57285 - 2249 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.004449422s
	[INFO] 10.244.0.21:45913 - 59012 "AAAA IN hello-world-app.default.svc.cluster.local.us-east-2.compute.internal. udp 86 false 512" NXDOMAIN qr,rd,ra 86 0.004617779s
	[INFO] 10.244.0.21:57285 - 59096 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.000076642s
	[INFO] 10.244.0.21:45913 - 58622 "A IN hello-world-app.default.svc.cluster.local. udp 59 false 512" NOERROR qr,aa,rd 116 0.00010856s
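	
	The alternating NXDOMAIN/NOERROR answers above are ordinary resolv.conf search-path expansion: "hello-world-app.default.svc.cluster.local" has fewer dots (4) than the ndots threshold, so the resolver tries each search suffix first (all NXDOMAIN) and only then the name as given (NOERROR). A sketch of the pod resolv.conf that would produce exactly these suffixes, assuming the querying pod sits in the ingress-nginx namespace and the usual kubelet defaults; this file is not captured in the log:
	
	  search ingress-nginx.svc.cluster.local svc.cluster.local cluster.local us-east-2.compute.internal
	  nameserver 10.96.0.10   # assumed kube-dns ClusterIP, not shown in this log
	  options ndots:5         # Kubernetes default; 4 dots < 5 triggers the expansion seen above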
	
	
	==> describe nodes <==
	Name:               addons-155517
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-155517
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8
	                    minikube.k8s.io/name=addons-155517
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700
	                    minikube.k8s.io/version=v1.33.1
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-155517
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Thu, 04 Jul 2024 01:08:57 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-155517
	  AcquireTime:     <unset>
	  RenewTime:       Thu, 04 Jul 2024 01:48:07 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Thu, 04 Jul 2024 01:45:44 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Thu, 04 Jul 2024 01:45:44 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Thu, 04 Jul 2024 01:45:44 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Thu, 04 Jul 2024 01:45:44 +0000   Thu, 04 Jul 2024 01:09:10 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-155517
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	System Info:
	  Machine ID:                 9fd27c19cbdd40e797fc2e621404e195
	  System UUID:                ff208bb9-dbd1-4ea7-9a2d-001b90e3d2d4
	  Boot ID:                    8f650b57-d36f-4952-bd7f-5577bab5f375
	  Kernel Version:             5.15.0-1064-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.18
	  Kubelet Version:            v1.30.2
	  Kube-Proxy Version:         v1.30.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (17 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     hello-world-app-86c47465fc-kcpm6         0 (0%)        0 (0%)      0 (0%)           0 (0%)         33m
	  default                     nginx                                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         33m
	  gadget                      gadget-9pgwd                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  gcp-auth                    gcp-auth-5db96cd9b4-s44p2                0 (0%)        0 (0%)      0 (0%)           0 (0%)         37m
	  headlamp                    headlamp-7867546754-xmxdd                0 (0%)        0 (0%)      0 (0%)           0 (0%)         37m
	  kube-system                 coredns-7db6d8ff4d-5x2l7                 100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     38m
	  kube-system                 etcd-addons-155517                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         39m
	  kube-system                 kindnet-7qr8x                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      38m
	  kube-system                 kube-apiserver-addons-155517             250m (12%)    0 (0%)      0 (0%)           0 (0%)         39m
	  kube-system                 kube-controller-manager-addons-155517    200m (10%)    0 (0%)      0 (0%)           0 (0%)         39m
	  kube-system                 kube-proxy-62r6j                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  kube-system                 kube-scheduler-addons-155517             100m (5%)     0 (0%)      0 (0%)           0 (0%)         39m
	  kube-system                 storage-provisioner                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  volcano-system              volcano-admission-5f7844f7bc-kv4hh       0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  volcano-system              volcano-controllers-59cb4746db-b7bd8     0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  volcano-system              volcano-scheduler-844f6db89b-bwtk7       0 (0%)        0 (0%)      0 (0%)           0 (0%)         38m
	  yakd-dashboard              yakd-dashboard-799879c74f-gdj2b          0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     38m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             348Mi (4%)  476Mi (6%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 38m   kube-proxy       
	  Normal  Starting                 39m   kubelet          Starting kubelet.
	  Normal  NodeHasSufficientMemory  39m   kubelet          Node addons-155517 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    39m   kubelet          Node addons-155517 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     39m   kubelet          Node addons-155517 status is now: NodeHasSufficientPID
	  Normal  NodeNotReady             39m   kubelet          Node addons-155517 status is now: NodeNotReady
	  Normal  NodeAllocatableEnforced  39m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                39m   kubelet          Node addons-155517 status is now: NodeReady
	  Normal  RegisteredNode           39m   node-controller  Node addons-155517 event: Registered Node addons-155517 in Controller
	
	
	==> dmesg <==
	[  +0.001017] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000030 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000893] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.001005] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +0.002621] FS-Cache: Duplicate cookie detected
	[  +0.000687] FS-Cache: O-cookie c=0000002a [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=000000001e9df579
	[  +0.001010] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000031 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000899] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000dd6a8763
	[  +0.001363] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +2.399414] FS-Cache: Duplicate cookie detected
	[  +0.000668] FS-Cache: O-cookie c=00000028 [p=00000027 fl=226 nc=0 na=1]
	[  +0.000927] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=00000000807950c0
	[  +0.001002] FS-Cache: O-key=[8] '1571ed0000000000'
	[  +0.000681] FS-Cache: N-cookie c=00000033 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000944] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.000997] FS-Cache: N-key=[8] '1571ed0000000000'
	[  +0.414782] FS-Cache: Duplicate cookie detected
	[  +0.000671] FS-Cache: O-cookie c=0000002d [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=0000000007b93ca1
	[  +0.000982] FS-Cache: O-key=[8] '1b71ed0000000000'
	[  +0.000661] FS-Cache: N-cookie c=00000034 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000895] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000f6098f89
	[  +0.000977] FS-Cache: N-key=[8] '1b71ed0000000000'
	
	
	==> etcd [f517c28ada41915907ef5e67d80504dca45673592d66bc4f85c35c8241aa8787] <==
	{"level":"info","ts":"2024-07-04T01:08:53.836016Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836115Z","caller":"etcdserver/server.go:2602","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836033Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-07-04T01:08:53.84114Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.856473Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.859549Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-07-04T01:08:53.860789Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-07-04T01:18:55.445257Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":2129}
	{"level":"info","ts":"2024-07-04T01:18:55.529596Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":2129,"took":"83.494008ms","hash":1098626703,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":3559424,"current-db-size-in-use":"3.6 MB"}
	{"level":"info","ts":"2024-07-04T01:18:55.529649Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":1098626703,"revision":2129,"compact-revision":-1}
	{"level":"info","ts":"2024-07-04T01:23:55.450555Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":2589}
	{"level":"info","ts":"2024-07-04T01:23:55.469801Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":2589,"took":"18.558617ms","hash":2017161745,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":2666496,"current-db-size-in-use":"2.7 MB"}
	{"level":"info","ts":"2024-07-04T01:23:55.469852Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":2017161745,"revision":2589,"compact-revision":2129}
	{"level":"info","ts":"2024-07-04T01:28:55.456024Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":2832}
	{"level":"info","ts":"2024-07-04T01:28:55.459705Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":2832,"took":"3.051986ms","hash":1534132580,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":2818048,"current-db-size-in-use":"2.8 MB"}
	{"level":"info","ts":"2024-07-04T01:28:55.459751Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":1534132580,"revision":2832,"compact-revision":2589}
	{"level":"info","ts":"2024-07-04T01:33:55.46251Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":3098}
	{"level":"info","ts":"2024-07-04T01:33:55.466729Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":3098,"took":"3.383401ms","hash":67623355,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":2715648,"current-db-size-in-use":"2.7 MB"}
	{"level":"info","ts":"2024-07-04T01:33:55.466782Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":67623355,"revision":3098,"compact-revision":2832}
	{"level":"info","ts":"2024-07-04T01:38:55.468824Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":3340}
	{"level":"info","ts":"2024-07-04T01:38:55.472382Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":3340,"took":"2.972907ms","hash":588078283,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":2654208,"current-db-size-in-use":"2.7 MB"}
	{"level":"info","ts":"2024-07-04T01:38:55.472428Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":588078283,"revision":3340,"compact-revision":3098}
	{"level":"info","ts":"2024-07-04T01:43:55.474859Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":3584}
	{"level":"info","ts":"2024-07-04T01:43:55.478468Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":3584,"took":"3.057919ms","hash":2434383861,"current-db-size-bytes":10215424,"current-db-size":"10 MB","current-db-size-in-use-bytes":2641920,"current-db-size-in-use":"2.6 MB"}
	{"level":"info","ts":"2024-07-04T01:43:55.478528Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":2434383861,"revision":3584,"compact-revision":3340}
	
	
	==> gcp-auth [176e35400e6b4d15467a19b19558445b7d1b5dbec42ad8098a036e029f6b3077] <==
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:11:04 Ready to marshal response ...
	2024/07/04 01:11:04 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:28 Ready to marshal response ...
	2024/07/04 01:11:28 Ready to write response ...
	2024/07/04 01:12:21 Ready to marshal response ...
	2024/07/04 01:12:21 Ready to write response ...
	2024/07/04 01:12:36 Ready to marshal response ...
	2024/07/04 01:12:36 Ready to write response ...
	2024/07/04 01:14:31 Ready to marshal response ...
	2024/07/04 01:14:31 Ready to write response ...
	2024/07/04 01:14:40 Ready to marshal response ...
	2024/07/04 01:14:40 Ready to write response ...
	
	
	==> kernel <==
	 01:48:13 up  7:30,  0 users,  load average: 0.21, 0.13, 0.45
	Linux addons-155517 5.15.0-1064-aws #70~20.04.1-Ubuntu SMP Thu Jun 27 14:52:48 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [67cab13d85edc9d5c0f37e6bf189f122a928a79d09306ee9a7e93dbc16acca46] <==
	I0704 01:46:07.698094       1 main.go:227] handling current node
	I0704 01:46:17.702425       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:46:17.702450       1 main.go:227] handling current node
	I0704 01:46:27.712389       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:46:27.712427       1 main.go:227] handling current node
	I0704 01:46:37.716647       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:46:37.716681       1 main.go:227] handling current node
	I0704 01:46:47.728020       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:46:47.728050       1 main.go:227] handling current node
	I0704 01:46:57.731410       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:46:57.731450       1 main.go:227] handling current node
	I0704 01:47:07.743703       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:07.743729       1 main.go:227] handling current node
	I0704 01:47:17.747188       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:17.747216       1 main.go:227] handling current node
	I0704 01:47:27.757855       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:27.757886       1 main.go:227] handling current node
	I0704 01:47:37.767154       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:37.767183       1 main.go:227] handling current node
	I0704 01:47:47.770838       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:47.770867       1 main.go:227] handling current node
	I0704 01:47:57.783084       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:47:57.783112       1 main.go:227] handling current node
	I0704 01:48:07.796307       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:48:07.796350       1 main.go:227] handling current node
	
	
	==> kube-apiserver [d5678b588829bae555d8757bb9d8a3f9d182137807bb650acee005f9f590f5d2] <==
	I0704 01:11:20.663546       1 controller.go:615] quota admission added evaluator for: podgroups.scheduling.volcano.sh
	E0704 01:11:44.500768       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"local-path-provisioner-service-account\" not found]"
	I0704 01:12:33.267448       1 controller.go:615] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
	E0704 01:12:45.514392       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"csi-hostpathplugin-sa\" not found]"
	I0704 01:12:52.070076       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.070121       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.111653       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.111699       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.123212       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.123255       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.144000       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.144039       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.193146       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.193191       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	W0704 01:12:53.114139       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
	W0704 01:12:53.193357       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
	W0704 01:12:53.202807       1 cacher.go:168] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
	I0704 01:12:58.929384       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0704 01:12:59.966476       1 cacher.go:168] Terminating all watchers from cacher traces.gadget.kinvolk.io
	I0704 01:14:31.058045       1 controller.go:615] quota admission added evaluator for: ingresses.networking.k8s.io
	I0704 01:14:31.339872       1 alloc.go:330] "allocated clusterIPs" service="default/nginx" clusterIPs={"IPv4":"10.109.38.209"}
	I0704 01:14:40.977207       1 alloc.go:330] "allocated clusterIPs" service="default/hello-world-app" clusterIPs={"IPv4":"10.109.173.179"}
	E0704 01:14:58.284557       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"ingress-nginx\" not found]"
	E0704 01:14:58.373464       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"ingress-nginx\" not found]"
	I0704 01:15:11.298754       1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Nothing (removed from the queue).
	
	
	==> kube-controller-manager [d26768fcef3f904fbe4d8309b2336e0d0536a0636f241b26984323c589bd890e] <==
	W0704 01:46:09.221635       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:46:09.221688       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	I0704 01:46:16.876529       1 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/hello-world-app-86c47465fc" duration="43.314µs"
	E0704 01:46:18.873580       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	W0704 01:46:24.449510       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:46:24.449784       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:46:33.375405       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:46:33.375444       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:46:44.411610       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:46:44.411713       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:46:46.175993       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:46:46.176042       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:47:00.798918       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:00.798967       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:47:16.855863       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:16.855992       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:19.007825       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	W0704 01:47:28.858683       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:28.858880       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:47:37.314611       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:37.314654       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:47:44.707410       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:47:44.707451       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:48:07.972815       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:48:07.972863       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	
	
	==> kube-proxy [8a696cf44b3b1b9718c7bc5215c5dd91d048a8496501845e4b243c3a46ba4f90] <==
	I0704 01:09:15.745880       1 server_linux.go:69] "Using iptables proxy"
	I0704 01:09:15.772902       1 server.go:1062] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	I0704 01:09:15.799195       1 server.go:659] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0704 01:09:15.799240       1 server_linux.go:165] "Using iptables Proxier"
	I0704 01:09:15.804393       1 server_linux.go:511] "Detect-local-mode set to ClusterCIDR, but no cluster CIDR for family" ipFamily="IPv6"
	I0704 01:09:15.804419       1 server_linux.go:528] "Defaulting to no-op detect-local"
	I0704 01:09:15.804446       1 proxier.go:243] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses"
	I0704 01:09:15.804673       1 server.go:872] "Version info" version="v1.30.2"
	I0704 01:09:15.804688       1 server.go:874] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0704 01:09:15.805805       1 config.go:192] "Starting service config controller"
	I0704 01:09:15.805822       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0704 01:09:15.805846       1 config.go:101] "Starting endpoint slice config controller"
	I0704 01:09:15.805850       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0704 01:09:15.808392       1 config.go:319] "Starting node config controller"
	I0704 01:09:15.808406       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0704 01:09:15.906774       1 shared_informer.go:320] Caches are synced for service config
	I0704 01:09:15.906727       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0704 01:09:15.908585       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [566fe4aca8adb536ad06b4727d9447ef68bd730ff7b9e8ddd94c6dfc6a8de11a] <==
	W0704 01:08:57.400228       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0704 01:08:57.400246       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	W0704 01:08:57.400636       1 reflector.go:547] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0704 01:08:57.400664       1 reflector.go:150] runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	W0704 01:08:57.400873       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.400898       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:57.401054       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0704 01:08:57.401076       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	W0704 01:08:57.401091       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0704 01:08:57.401108       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	W0704 01:08:57.401143       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0704 01:08:57.401163       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	W0704 01:08:57.401174       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.401182       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.239191       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0704 01:08:58.239466       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	W0704 01:08:58.379662       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0704 01:08:58.379764       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	W0704 01:08:58.415451       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0704 01:08:58.415596       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	W0704 01:08:58.433246       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:58.433288       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.520896       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0704 01:08:58.520940       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	I0704 01:08:58.883633       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Jul 04 01:47:41 addons-155517 kubelet[1544]: E0704 01:47:41.954521    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"32a4288c6e6f827c0275815960a9e61143effe5242a8f9d8adf84f8dd06cfcb1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:41 addons-155517 kubelet[1544]: E0704 01:47:41.972147    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"e0392dde63c852de5ab46e7c315d883920d98a41db1b87af7d9c8c4f15793b54\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:41 addons-155517 kubelet[1544]: E0704 01:47:41.989956    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"b53b41d076c30da35a71b409003922b3fe5131262d77cf2ca5433decc80fd543\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:46 addons-155517 kubelet[1544]: E0704 01:47:46.957589    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"09f49646a933d0c9284e7d84b1fe97ef868037717df291e8714cc0feba14c2f7\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:46 addons-155517 kubelet[1544]: E0704 01:47:46.973447    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"ced7d4327173d7a5c66a24be47b6854c2d5f0729122b09e439c21227e3a4e55b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:46 addons-155517 kubelet[1544]: E0704 01:47:46.989216    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"337d184d93574455508fa55ec53347e213804c349bc0850f1f5beb5b299ea36e\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:48 addons-155517 kubelet[1544]: I0704 01:47:48.863258    1544 scope.go:117] "RemoveContainer" containerID="ba40c8fe3b49d048e58043a7632549b22ad611618cea289799de1f1b96bdba02"
	Jul 04 01:47:48 addons-155517 kubelet[1544]: E0704 01:47:48.863594    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=hello-world-app pod=hello-world-app-86c47465fc-kcpm6_default(c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f)\"" pod="default/hello-world-app-86c47465fc-kcpm6" podUID="c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f"
	Jul 04 01:47:51 addons-155517 kubelet[1544]: E0704 01:47:51.957649    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"a2670010e0cc7e0feb0bbb11c346eee03fc7078872e9252957c0fe98f4785adc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:51 addons-155517 kubelet[1544]: E0704 01:47:51.972501    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"d9b73eddc90f11e585a18df9844c560327a53e00171498dfdd71046f759b270e\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:51 addons-155517 kubelet[1544]: E0704 01:47:51.988441    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"92ad3762f8736517f2270992f8715cc43f5b417f21d6e63fb556997480df0dec\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:56 addons-155517 kubelet[1544]: E0704 01:47:56.958460    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"c9c541a95d56708691cf2ec4fcd930fdda4af7bddafd5748b6c6c297d4e756e6\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:56 addons-155517 kubelet[1544]: E0704 01:47:56.982901    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"629515ff3da8e89e51042374b60f8bdde9cddad0d16d41db3a6898d79212623b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:47:57 addons-155517 kubelet[1544]: E0704 01:47:57.016328    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"7ad8523bae3186a7b17b652015bbe2e3a4aab931b6196cc450f86aaab52178e1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:01 addons-155517 kubelet[1544]: E0704 01:48:01.956982    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"b721da210541696b1728e332a22e66092fd53c59c6410d24d6fec8fe1bbda10b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:01 addons-155517 kubelet[1544]: E0704 01:48:01.972667    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"7d4bbb3fc04f460cd0dd3ec4fd5956d338c5e586510b2d6b1cdd24137c2c2bf7\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:01 addons-155517 kubelet[1544]: E0704 01:48:01.985497    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"0505d507e296d33f44cbc780b86b149a2d6db793ba99c7c20375c236259cbfc3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:02 addons-155517 kubelet[1544]: I0704 01:48:02.862887    1544 scope.go:117] "RemoveContainer" containerID="ba40c8fe3b49d048e58043a7632549b22ad611618cea289799de1f1b96bdba02"
	Jul 04 01:48:02 addons-155517 kubelet[1544]: E0704 01:48:02.863197    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"hello-world-app\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=hello-world-app pod=hello-world-app-86c47465fc-kcpm6_default(c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f)\"" pod="default/hello-world-app-86c47465fc-kcpm6" podUID="c1e9f2c8-cd69-4f67-9fcc-90fb2499e32f"
	Jul 04 01:48:06 addons-155517 kubelet[1544]: E0704 01:48:06.954600    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"1492eb8d23e14493ff75a3d3be41f69ee269490a354abb1f19fd94f49a2c828b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:06 addons-155517 kubelet[1544]: E0704 01:48:06.968905    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"9d40d1befda15c3b466c946bdc8a148e6400460d8acc849d9325fcbb376a01dd\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:06 addons-155517 kubelet[1544]: E0704 01:48:06.981437    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"b40577cf1d1f68b4ce117a4b7956384a363841fcadc7bc55412fee4e0101c29b\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:11 addons-155517 kubelet[1544]: E0704 01:48:11.955955    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"313306d3abb4f2d025c2082c7e741daa7818bbdf58a86d8404a94e112ff16203\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:11 addons-155517 kubelet[1544]: E0704 01:48:11.969681    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"174d0e08b879df5aece3faf447c8ed5620f4547415c4c300f25cbb21c471d3e5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:48:11 addons-155517 kubelet[1544]: E0704 01:48:11.983958    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"d8f2c69db1d36ab9c269785a6342f1dfde1f28c9730b107a2af6ca0aebbaa8b0\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	
	
	==> storage-provisioner [33899ba7d59110e40a8409b27a7737ac3b4858d348229b496df42db6a119852b] <==
	I0704 01:09:20.859954       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0704 01:09:20.902425       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0704 01:09:20.902503       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0704 01:09:20.962139       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0704 01:09:20.962380       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	I0704 01:09:20.963772       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"58c57b6c-af21-4158-b30e-a900c384acaa", APIVersion:"v1", ResourceVersion:"568", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db became leader
	I0704 01:09:21.062827       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	

-- /stdout --
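The storage-provisioner log above shows the standard client-go leader-election handshake: attempt to acquire the kube-system/k8s.io-minikube-hostpath lease, win it, then start the provisioner controller. The provisioner in this log still uses the legacy Endpoints-based lock (hence the Endpoints event), but the same pattern with a current client-go Lease lock looks roughly like the sketch below; the identity string, kubeconfig path, and timings are assumptions, not values from minikube.

	package main

	import (
		"context"
		"path/filepath"
		"time"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
		"k8s.io/client-go/tools/leaderelection"
		"k8s.io/client-go/tools/leaderelection/resourcelock"
		"k8s.io/client-go/util/homedir"
	)

	func main() {
		// Assumed kubeconfig location; adjust for your environment.
		kubeconfig := filepath.Join(homedir.HomeDir(), ".kube", "config")
		config, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
		if err != nil {
			panic(err)
		}
		clientset := kubernetes.NewForConfigOrDie(config)

		// Lease name and namespace taken from the log above.
		lock := &resourcelock.LeaseLock{
			LeaseMeta: metav1.ObjectMeta{
				Name:      "k8s.io-minikube-hostpath",
				Namespace: "kube-system",
			},
			Client:     clientset.CoordinationV1(),
			LockConfig: resourcelock.ResourceLockConfig{Identity: "example-provisioner"},
		}

		leaderelection.RunOrDie(context.Background(), leaderelection.LeaderElectionConfig{
			Lock:            lock,
			ReleaseOnCancel: true,
			LeaseDuration:   15 * time.Second,
			RenewDeadline:   10 * time.Second,
			RetryPeriod:     2 * time.Second,
			Callbacks: leaderelection.LeaderCallbacks{
				OnStartedLeading: func(ctx context.Context) {
					// Corresponds to "Starting provisioner controller" in the
					// log: begin leader-only work here.
				},
				OnStoppedLeading: func() {
					// Lost the lease: stop leader-only work.
				},
			},
		})
	}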
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-155517 -n addons-155517
helpers_test.go:261: (dbg) Run:  kubectl --context addons-155517 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: test-job-nginx-0
helpers_test.go:274: ======> post-mortem[TestAddons/parallel/InspektorGadget]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context addons-155517 describe pod test-job-nginx-0
helpers_test.go:277: (dbg) Non-zero exit: kubectl --context addons-155517 describe pod test-job-nginx-0: exit status 1 (97.248036ms)

** stderr ** 
	Error from server (NotFound): pods "test-job-nginx-0" not found

** /stderr **
helpers_test.go:279: kubectl --context addons-155517 describe pod test-job-nginx-0: exit status 1
--- FAIL: TestAddons/parallel/InspektorGadget (2122.19s)
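The long runtime above is dominated by the kubelet retrying the gadget container's exec liveness probe ("/bin/gadgettracermanager -liveness") after the container had already stopped; every "ExecSync ... cannot exec in a stopped container" line in the kubelet log records one such attempt. For reference, a probe of that shape expressed with the Kubernetes Go API types looks like the sketch below; the period and failure threshold are illustrative assumptions, not values read from the inspektor-gadget manifest.

	package main

	import (
		"fmt"

		corev1 "k8s.io/api/core/v1"
	)

	func main() {
		// An exec liveness probe: the kubelet periodically runs this command
		// inside the container and treats a non-zero exit (or, as in the log
		// above, a failure to exec at all) as a probe failure.
		probe := &corev1.Probe{
			ProbeHandler: corev1.ProbeHandler{
				Exec: &corev1.ExecAction{
					Command: []string{"/bin/gadgettracermanager", "-liveness"},
				},
			},
			PeriodSeconds:    5, // assumed; the log shows attempts roughly every 5s
			FailureThreshold: 3, // assumed
		}
		fmt.Printf("probe command: %v\n", probe.ProbeHandler.Exec.Command)
	}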

TestAddons/parallel/Volcano (199.63s)

=== RUN   TestAddons/parallel/Volcano
=== PAUSE TestAddons/parallel/Volcano

=== CONT  TestAddons/parallel/Volcano
addons_test.go:897: volcano-admission stabilized in 6.23423ms
addons_test.go:905: volcano-controller stabilized in 6.874446ms
addons_test.go:889: volcano-scheduler stabilized in 6.963634ms
addons_test.go:911: (dbg) TestAddons/parallel/Volcano: waiting 6m0s for pods matching "app=volcano-scheduler" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-scheduler-844f6db89b-bwtk7" [047b284e-0744-426f-a7fa-d0fdac2983ac] Running
addons_test.go:911: (dbg) TestAddons/parallel/Volcano: app=volcano-scheduler healthy within 6.004011653s
addons_test.go:915: (dbg) TestAddons/parallel/Volcano: waiting 6m0s for pods matching "app=volcano-admission" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-admission-5f7844f7bc-kv4hh" [4e9c70df-e035-477f-b9af-5dcb7848025e] Running
addons_test.go:915: (dbg) TestAddons/parallel/Volcano: app=volcano-admission healthy within 5.004325474s
addons_test.go:919: (dbg) TestAddons/parallel/Volcano: waiting 6m0s for pods matching "app=volcano-controller" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-controllers-59cb4746db-b7bd8" [d747685a-5823-475c-a4c3-35c674412604] Running
addons_test.go:919: (dbg) TestAddons/parallel/Volcano: app=volcano-controller healthy within 5.004233325s
addons_test.go:924: (dbg) Run:  kubectl --context addons-155517 delete -n volcano-system job volcano-admission-init
addons_test.go:930: (dbg) Run:  kubectl --context addons-155517 create -f testdata/vcjob.yaml
addons_test.go:938: (dbg) Run:  kubectl --context addons-155517 get vcjob -n my-volcano
addons_test.go:956: (dbg) TestAddons/parallel/Volcano: waiting 3m0s for pods matching "volcano.sh/job-name=test-job" in namespace "my-volcano" ...
helpers_test.go:344: "test-job-nginx-0" [8a2d863e-51fd-41e3-865a-5cec48aecbc6] Pending
helpers_test.go:344: "test-job-nginx-0" [8a2d863e-51fd-41e3-865a-5cec48aecbc6] Pending: PodScheduled:Unschedulable (0/1 nodes are unavailable: 1 Insufficient cpu.)
helpers_test.go:329: TestAddons/parallel/Volcano: WARNING: pod list for "my-volcano" "volcano.sh/job-name=test-job" returned: client rate limiter Wait returned an error: context deadline exceeded
addons_test.go:956: ***** TestAddons/parallel/Volcano: pod "volcano.sh/job-name=test-job" failed to start within 3m0s: context deadline exceeded ****
addons_test.go:956: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-155517 -n addons-155517
addons_test.go:956: TestAddons/parallel/Volcano: showing logs for failed pods as of 2024-07-04 01:14:21.086617981 +0000 UTC m=+386.856468463
addons_test.go:956: (dbg) Run:  kubectl --context addons-155517 describe po test-job-nginx-0 -n my-volcano
addons_test.go:956: (dbg) kubectl --context addons-155517 describe po test-job-nginx-0 -n my-volcano:
Name:             test-job-nginx-0
Namespace:        my-volcano
Priority:         0
Service Account:  default
Node:             <none>
Labels:           volcano.sh/job-name=test-job
                  volcano.sh/job-namespace=my-volcano
                  volcano.sh/queue-name=test
                  volcano.sh/task-index=0
                  volcano.sh/task-spec=nginx
Annotations:      scheduling.k8s.io/group-name: test-job-a6fdcca7-751f-49a6-bb00-9acdcc7176f1
                  volcano.sh/job-name: test-job
                  volcano.sh/job-version: 0
                  volcano.sh/queue-name: test
                  volcano.sh/task-index: 0
                  volcano.sh/task-spec: nginx
                  volcano.sh/template-uid: test-job-nginx
Status:           Pending
IP:
IPs:              <none>
Controlled By:    Job/test-job
Containers:
  nginx:
    Image:      nginx:latest
    Port:       <none>
    Host Port:  <none>
    Command:
      sleep
      10m
    Limits:
      cpu:  1
    Requests:
      cpu:  1
    Environment:
      GOOGLE_APPLICATION_CREDENTIALS:  /google-app-creds.json
      PROJECT_ID:                      this_is_fake
      GCP_PROJECT:                     this_is_fake
      GCLOUD_PROJECT:                  this_is_fake
      GOOGLE_CLOUD_PROJECT:            this_is_fake
      CLOUDSDK_CORE_PROJECT:           this_is_fake
    Mounts:
      /google-app-creds.json from gcp-creds (ro)
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-jhs6w (ro)
Conditions:
  Type           Status
  PodScheduled   False
Volumes:
  kube-api-access-jhs6w:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    ConfigMapOptional:       <nil>
    DownwardAPI:             true
  gcp-creds:
    Type:          HostPath (bare host directory volume)
    Path:          /var/lib/minikube/google_application_credentials.json
    HostPathType:  File
QoS Class:         Burstable
Node-Selectors:    <none>
Tolerations:       node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                   node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
  Type     Reason            Age   From     Message
  ----     ------            ----  ----     -------
  Warning  FailedScheduling  3m    volcano  0/1 nodes are unavailable: 1 Insufficient cpu.
addons_test.go:956: (dbg) Run:  kubectl --context addons-155517 logs test-job-nginx-0 -n my-volcano
addons_test.go:956: (dbg) kubectl --context addons-155517 logs test-job-nginx-0 -n my-volcano:
addons_test.go:957: failed waiting for test-local-path pod: volcano.sh/job-name=test-job within 3m0s: context deadline exceeded
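The FailedScheduling event points at node resources rather than Volcano itself: the vcjob's only container requests a full CPU ("Requests: cpu: 1"), while the minikube node was created with just two CPUs ("NanoCpus": 2000000000 in the docker inspect output below), so once the other addons' requests are counted, less than one whole CPU remains allocatable. A minimal diagnostic sketch of that arithmetic with client-go follows; the node name comes from this report, the kubeconfig path is an assumption, and unlike the scheduler it also counts requests of terminated pods, so it can slightly overstate usage.

	package main

	import (
		"context"
		"fmt"
		"path/filepath"

		corev1 "k8s.io/api/core/v1"
		"k8s.io/apimachinery/pkg/api/resource"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
		"k8s.io/client-go/util/homedir"
	)

	func main() {
		// Assumed kubeconfig location; adjust for your environment.
		kubeconfig := filepath.Join(homedir.HomeDir(), ".kube", "config")
		config, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
		if err != nil {
			panic(err)
		}
		clientset := kubernetes.NewForConfigOrDie(config)
		ctx := context.Background()

		// The node name equals the minikube profile name in this report.
		node, err := clientset.CoreV1().Nodes().Get(ctx, "addons-155517", metav1.GetOptions{})
		if err != nil {
			panic(err)
		}
		allocatable := node.Status.Allocatable[corev1.ResourceCPU]

		pods, err := clientset.CoreV1().Pods("").List(ctx, metav1.ListOptions{
			FieldSelector: "spec.nodeName=addons-155517",
		})
		if err != nil {
			panic(err)
		}

		// Sum the CPU requests of every container scheduled to the node.
		requested := resource.MustParse("0")
		for _, p := range pods.Items {
			for _, c := range p.Spec.Containers {
				if cpu, ok := c.Resources.Requests[corev1.ResourceCPU]; ok {
					requested.Add(cpu)
				}
			}
		}

		// If allocatable minus requested is under 1 CPU, a pod asking for
		// "cpu: 1" is reported as Unschedulable: Insufficient cpu.
		fmt.Printf("allocatable=%s requested=%s\n", allocatable.String(), requested.String())
	}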
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Volcano]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-155517
helpers_test.go:235: (dbg) docker inspect addons-155517:

-- stdout --
	[
	    {
	        "Id": "3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908",
	        "Created": "2024-07-04T01:08:36.303265492Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1196940,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-07-04T01:08:36.435418596Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:fe62b5a5301065dd92924d274286e0d1b2227c557eb51c213d07169631b2b3f7",
	        "ResolvConfPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hostname",
	        "HostsPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/hosts",
	        "LogPath": "/var/lib/docker/containers/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908/3fdcd90a73a7ea02e66574c10f20fe7e4895345c14c83582a4b20690b97b6908-json.log",
	        "Name": "/addons-155517",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-155517:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-155517",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908-init/diff:/var/lib/docker/overlay2/04be1cfb4b9b173c47d5bff32a15bd2c62951348a7d8ba248dee1fc574bba292/diff",
	                "MergedDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/merged",
	                "UpperDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/diff",
	                "WorkDir": "/var/lib/docker/overlay2/0c9e9525005bbc1d92b91271f9bbc278c449163313acc66d3f355c17ed992908/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-155517",
	                "Source": "/var/lib/docker/volumes/addons-155517/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-155517",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-155517",
	                "name.minikube.sigs.k8s.io": "addons-155517",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "00149764ac28081fa271644bd8abd017b3859409342eb0cd27d7072d3bc248ad",
	            "SandboxKey": "/var/run/docker/netns/00149764ac28",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33941"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33942"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33945"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33943"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33944"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-155517": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "664a751e340faab9d5bafc3b0d537e0c69162e18e1e2ac12a6117fe790e76074",
	                    "EndpointID": "8e02106e4b600acb13911d96d2636d06ef6a5e9c2d13c8b3302477a5d63292bc",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-155517",
	                        "3fdcd90a73a7"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
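The inspect output confirms the resource caps behind the earlier "Insufficient cpu" event: "NanoCpus": 2000000000 gives the node container two CPUs, and "Memory": 4194304000 is exactly 4000 MiB, matching the --memory=4000 start flag recorded in the Audit table below. Reading the same fields programmatically with the Docker Go SDK takes only a few lines; this sketch assumes a local Docker socket reachable through the standard environment variables.

	package main

	import (
		"context"
		"fmt"

		"github.com/docker/docker/client"
	)

	func main() {
		cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
		if err != nil {
			panic(err)
		}
		defer cli.Close()

		// Inspect the minikube node container by name (from this report).
		info, err := cli.ContainerInspect(context.Background(), "addons-155517")
		if err != nil {
			panic(err)
		}
		fmt.Printf("cpus=%.1f memoryMiB=%d\n",
			float64(info.HostConfig.NanoCPUs)/1e9,
			info.HostConfig.Memory/(1024*1024))
	}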
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-155517 -n addons-155517
helpers_test.go:244: <<< TestAddons/parallel/Volcano FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Volcano]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 logs -n 25: (1.547977961s)
helpers_test.go:252: TestAddons/parallel/Volcano logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                                            Args                                             |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only                                                                     | download-only-327632   | jenkins | v1.33.1 | 04 Jul 24 01:07 UTC |                     |
	|         | -p download-only-327632                                                                     |                        |         |         |                     |                     |
	|         | --force --alsologtostderr                                                                   |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0                                                                |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | --all                                                                                       | minikube               | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-327632                                                                     | download-only-327632   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | -o=json --download-only                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | -p download-only-526823                                                                     |                        |         |         |                     |                     |
	|         | --force --alsologtostderr                                                                   |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2                                                                |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | --all                                                                                       | minikube               | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-327632                                                                     | download-only-327632   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-526823                                                                     | download-only-526823   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | download-docker-898650                                                                      |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p download-docker-898650                                                                   | download-docker-898650 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | --download-only -p                                                                          | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | binary-mirror-236711                                                                        |                        |         |         |                     |                     |
	|         | --alsologtostderr                                                                           |                        |         |         |                     |                     |
	|         | --binary-mirror                                                                             |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35737                                                                      |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-236711                                                                     | binary-mirror-236711   | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| addons  | enable dashboard -p                                                                         | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | disable dashboard -p                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| start   | -p addons-155517 --wait=true                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:10 UTC |
	|         | --memory=4000 --alsologtostderr                                                             |                        |         |         |                     |                     |
	|         | --addons=registry                                                                           |                        |         |         |                     |                     |
	|         | --addons=metrics-server                                                                     |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots                                                                    |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver                                                                |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                                                                           |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner                                                                      |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget                                                                   |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher                                                        |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin                                                               |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano                                                              |                        |         |         |                     |                     |
	|         | --driver=docker                                                                             |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                                                              |                        |         |         |                     |                     |
	|         | --addons=ingress                                                                            |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                                                                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                                                                             | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:10 UTC | 04 Jul 24 01:10 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| ip      | addons-155517 ip                                                                            | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | registry --alsologtostderr                                                                  |                        |         |         |                     |                     |
	|         | -v=1                                                                                        |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | -p addons-155517                                                                            |                        |         |         |                     |                     |
	| ssh     | addons-155517 ssh cat                                                                       | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:11 UTC |
	|         | /opt/local-path-provisioner/pvc-c05b7d52-4c97-4ba9-8a04-478d46aaf85d_default_test-pvc/file1 |                        |         |         |                     |                     |
	| addons  | addons-155517 addons disable                                                                | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:11 UTC | 04 Jul 24 01:12 UTC |
	|         | storage-provisioner-rancher                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p                                                                    | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable csi-hostpath-driver                                                                 |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | addons-155517 addons                                                                        | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC | 04 Jul 24 01:12 UTC |
	|         | disable volumesnapshots                                                                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1                                                                      |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p                                                                 | addons-155517          | jenkins | v1.33.1 | 04 Jul 24 01:12 UTC |                     |
	|         | addons-155517                                                                               |                        |         |         |                     |                     |
	|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
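
	The table rows above wrap the recorded start invocation across several lines; reassembled into a single command (reconstructed from the table, using the MINIKUBE_BIN recorded in the start log below), it would read:
	
		out/minikube-linux-arm64 start -p addons-155517 --wait=true --memory=4000 --alsologtostderr \
		  --addons=registry --addons=metrics-server --addons=volumesnapshots \
		  --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner \
		  --addons=inspektor-gadget --addons=storage-provisioner-rancher \
		  --addons=nvidia-device-plugin --addons=yakd --addons=volcano \
		  --driver=docker --container-runtime=containerd \
		  --addons=ingress --addons=ingress-dns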
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 01:08:12
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 01:08:12.192271 1196445 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:08:12.192443 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192469 1196445 out.go:304] Setting ErrFile to fd 2...
	I0704 01:08:12.192488 1196445 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:12.192753 1196445 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:08:12.193235 1196445 out.go:298] Setting JSON to false
	I0704 01:08:12.194161 1196445 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":24643,"bootTime":1720030650,"procs":172,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:08:12.194241 1196445 start.go:139] virtualization:  
	I0704 01:08:12.196416 1196445 out.go:177] * [addons-155517] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 01:08:12.199094 1196445 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 01:08:12.199240 1196445 notify.go:220] Checking for updates...
	I0704 01:08:12.202974 1196445 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:08:12.205218 1196445 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:08:12.207279 1196445 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:08:12.209429 1196445 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 01:08:12.211932 1196445 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 01:08:12.213938 1196445 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:08:12.234468 1196445 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:08:12.234611 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.299096 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.289472659 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.299211 1196445 docker.go:295] overlay module found
	I0704 01:08:12.301600 1196445 out.go:177] * Using the docker driver based on user configuration
	I0704 01:08:12.303741 1196445 start.go:297] selected driver: docker
	I0704 01:08:12.303757 1196445 start.go:901] validating driver "docker" against <nil>
	I0704 01:08:12.303770 1196445 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 01:08:12.304380 1196445 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:12.362176 1196445 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:44 SystemTime:2024-07-04 01:08:12.352995693 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:12.362343 1196445 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0704 01:08:12.362580 1196445 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 01:08:12.364680 1196445 out.go:177] * Using Docker driver with root privileges
	I0704 01:08:12.366418 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:12.366456 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:12.366471 1196445 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0704 01:08:12.366574 1196445 start.go:340] cluster config:
	{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:12.368869 1196445 out.go:177] * Starting "addons-155517" primary control-plane node in "addons-155517" cluster
	I0704 01:08:12.371240 1196445 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 01:08:12.373736 1196445 out.go:177] * Pulling base image v0.0.44-1719972989-19184 ...
	I0704 01:08:12.376681 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:12.376741 1196445 preload.go:147] Found local preload: /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	I0704 01:08:12.376753 1196445 cache.go:56] Caching tarball of preloaded images
	I0704 01:08:12.376764 1196445 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 01:08:12.376836 1196445 preload.go:173] Found /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0704 01:08:12.376846 1196445 cache.go:59] Finished verifying existence of preloaded tar for v1.30.2 on containerd
	I0704 01:08:12.377194 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:12.377218 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json: {Name:mk1983fdaacaaa697964d44e7205145a658b8fe2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:12.392599 1196445 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:08:12.392727 1196445 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory
	I0704 01:08:12.392756 1196445 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory, skipping pull
	I0704 01:08:12.392765 1196445 image.go:105] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in cache, skipping pull
	I0704 01:08:12.392773 1196445 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 as a tarball
	I0704 01:08:12.392779 1196445 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from local cache
	I0704 01:08:28.987230 1196445 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 from cached tarball
	I0704 01:08:28.987269 1196445 cache.go:194] Successfully downloaded all kic artifacts
	I0704 01:08:28.987306 1196445 start.go:360] acquireMachinesLock for addons-155517: {Name:mk8b1bd096582ae2ddeb51ce97c96e8bd6c10c03 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0704 01:08:28.987436 1196445 start.go:364] duration metric: took 108.035µs to acquireMachinesLock for "addons-155517"
	I0704 01:08:28.987467 1196445 start.go:93] Provisioning new machine with config: &{Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:08:28.987567 1196445 start.go:125] createHost starting for "" (driver="docker")
	I0704 01:08:28.990219 1196445 out.go:204] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0704 01:08:28.990467 1196445 start.go:159] libmachine.API.Create for "addons-155517" (driver="docker")
	I0704 01:08:28.990494 1196445 client.go:168] LocalClient.Create starting
	I0704 01:08:28.990609 1196445 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem
	I0704 01:08:29.608241 1196445 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem
	I0704 01:08:29.736436 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0704 01:08:29.752779 1196445 cli_runner.go:211] docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0704 01:08:29.752887 1196445 network_create.go:284] running [docker network inspect addons-155517] to gather additional debugging logs...
	I0704 01:08:29.752909 1196445 cli_runner.go:164] Run: docker network inspect addons-155517
	W0704 01:08:29.768706 1196445 cli_runner.go:211] docker network inspect addons-155517 returned with exit code 1
	I0704 01:08:29.768736 1196445 network_create.go:287] error running [docker network inspect addons-155517]: docker network inspect addons-155517: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-155517 not found
	I0704 01:08:29.768749 1196445 network_create.go:289] output of [docker network inspect addons-155517]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-155517 not found
	
	** /stderr **
	I0704 01:08:29.768847 1196445 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:29.784069 1196445 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001c8c290}
	I0704 01:08:29.784107 1196445 network_create.go:124] attempt to create docker network addons-155517 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0704 01:08:29.784173 1196445 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-155517 addons-155517
	I0704 01:08:29.849610 1196445 network_create.go:108] docker network addons-155517 192.168.49.0/24 created
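	A quick manual check that the created bridge matches the calculated subnet and gateway (an illustrative diagnostic, not part of the recorded run) would be:
	
		docker network inspect addons-155517 \
		  --format 'subnet={{(index .IPAM.Config 0).Subnet}} gateway={{(index .IPAM.Config 0).Gateway}}'
		# expected: subnet=192.168.49.0/24 gateway=192.168.49.1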
	I0704 01:08:29.849640 1196445 kic.go:121] calculated static IP "192.168.49.2" for the "addons-155517" container
	I0704 01:08:29.849732 1196445 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0704 01:08:29.864369 1196445 cli_runner.go:164] Run: docker volume create addons-155517 --label name.minikube.sigs.k8s.io=addons-155517 --label created_by.minikube.sigs.k8s.io=true
	I0704 01:08:29.881618 1196445 oci.go:103] Successfully created a docker volume addons-155517
	I0704 01:08:29.881710 1196445 cli_runner.go:164] Run: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib
	I0704 01:08:31.824188 1196445 cli_runner.go:217] Completed: docker run --rm --name addons-155517-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --entrypoint /usr/bin/test -v addons-155517:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -d /var/lib: (1.942438119s)
	I0704 01:08:31.824231 1196445 oci.go:107] Successfully prepared a docker volume addons-155517
	I0704 01:08:31.824258 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:31.824278 1196445 kic.go:194] Starting extracting preloaded images to volume ...
	I0704 01:08:31.824367 1196445 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir
	I0704 01:08:36.228874 1196445 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-155517:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 -I lz4 -xf /preloaded.tar -C /extractDir: (4.404462021s)
	I0704 01:08:36.228908 1196445 kic.go:203] duration metric: took 4.404626194s to extract preloaded images to volume ...
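	The preload tarball is unpacked into the named volume that the sidecar mounts at /var; its contents can be spot-checked with the same image and mount pattern as the docker run invocations above (a hypothetical verification, not performed by the test):
	
		docker run --rm --entrypoint /usr/bin/du \
		  -v addons-155517:/var \
		  gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 \
		  -sh /var/lib/containerd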
	W0704 01:08:36.229067 1196445 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0704 01:08:36.229179 1196445 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0704 01:08:36.288715 1196445 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-155517 --name addons-155517 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-155517 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-155517 --network addons-155517 --ip 192.168.49.2 --volume addons-155517:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1
	I0704 01:08:36.596197 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Running}}
	I0704 01:08:36.616187 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:36.634979 1196445 cli_runner.go:164] Run: docker exec addons-155517 stat /var/lib/dpkg/alternatives/iptables
	I0704 01:08:36.703846 1196445 oci.go:144] the created container "addons-155517" has a running status.
	I0704 01:08:36.703874 1196445 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa...
	I0704 01:08:37.190789 1196445 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0704 01:08:37.210820 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.232784 1196445 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0704 01:08:37.232803 1196445 kic_runner.go:114] Args: [docker exec --privileged addons-155517 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0704 01:08:37.300309 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:08:37.323611 1196445 machine.go:94] provisionDockerMachine start ...
	I0704 01:08:37.323700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.348214 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.348477 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.348487 1196445 main.go:141] libmachine: About to run SSH command:
	hostname
	I0704 01:08:37.530824 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.530887 1196445 ubuntu.go:169] provisioning hostname "addons-155517"
	I0704 01:08:37.530986 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.553591 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.553832 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.553843 1196445 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-155517 && echo "addons-155517" | sudo tee /etc/hostname
	I0704 01:08:37.714152 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-155517
	
	I0704 01:08:37.714277 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:37.740011 1196445 main.go:141] libmachine: Using SSH client type: native
	I0704 01:08:37.740256 1196445 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 33941 <nil> <nil>}
	I0704 01:08:37.740273 1196445 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-155517' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-155517/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-155517' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0704 01:08:37.884114 1196445 main.go:141] libmachine: SSH cmd err, output: <nil>: 
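	The guard script above leaves /etc/hosts with exactly one 127.0.1.1 mapping for the node name; the resulting entry can be confirmed from the host (illustrative only):
	
		docker exec addons-155517 grep 127.0.1.1 /etc/hosts
		# 127.0.1.1 addons-155517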
	I0704 01:08:37.884141 1196445 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18859-1190282/.minikube CaCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18859-1190282/.minikube}
	I0704 01:08:37.884170 1196445 ubuntu.go:177] setting up certificates
	I0704 01:08:37.884179 1196445 provision.go:84] configureAuth start
	I0704 01:08:37.884270 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:37.903878 1196445 provision.go:143] copyHostCerts
	I0704 01:08:37.903989 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem (1078 bytes)
	I0704 01:08:37.904149 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem (1123 bytes)
	I0704 01:08:37.904212 1196445 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem (1675 bytes)
	I0704 01:08:37.904262 1196445 provision.go:117] generating server cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem org=jenkins.addons-155517 san=[127.0.0.1 192.168.49.2 addons-155517 localhost minikube]
	I0704 01:08:38.603884 1196445 provision.go:177] copyRemoteCerts
	I0704 01:08:38.603955 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0704 01:08:38.604000 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.623978 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.720166 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0704 01:08:38.743354 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0704 01:08:38.766785 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0704 01:08:38.789642 1196445 provision.go:87] duration metric: took 905.449187ms to configureAuth
	I0704 01:08:38.789672 1196445 ubuntu.go:193] setting minikube options for container-runtime
	I0704 01:08:38.789856 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:08:38.789871 1196445 machine.go:97] duration metric: took 1.466242644s to provisionDockerMachine
	I0704 01:08:38.789879 1196445 client.go:171] duration metric: took 9.799379567s to LocalClient.Create
	I0704 01:08:38.789892 1196445 start.go:167] duration metric: took 9.799425482s to libmachine.API.Create "addons-155517"
	I0704 01:08:38.789902 1196445 start.go:293] postStartSetup for "addons-155517" (driver="docker")
	I0704 01:08:38.789912 1196445 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0704 01:08:38.789968 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0704 01:08:38.790010 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.806758 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:38.904686 1196445 ssh_runner.go:195] Run: cat /etc/os-release
	I0704 01:08:38.908018 1196445 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0704 01:08:38.908055 1196445 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0704 01:08:38.908066 1196445 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0704 01:08:38.908082 1196445 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0704 01:08:38.908093 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/addons for local assets ...
	I0704 01:08:38.908162 1196445 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/files for local assets ...
	I0704 01:08:38.908190 1196445 start.go:296] duration metric: took 118.282456ms for postStartSetup
	I0704 01:08:38.908511 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:38.923846 1196445 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/config.json ...
	I0704 01:08:38.924136 1196445 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:08:38.924185 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:38.940968 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.036923 1196445 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0704 01:08:39.042092 1196445 start.go:128] duration metric: took 10.054507303s to createHost
	I0704 01:08:39.042119 1196445 start.go:83] releasing machines lock for "addons-155517", held for 10.054668801s
	I0704 01:08:39.042203 1196445 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-155517
	I0704 01:08:39.059237 1196445 ssh_runner.go:195] Run: cat /version.json
	I0704 01:08:39.059305 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.059637 1196445 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0704 01:08:39.059700 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:08:39.080747 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.097096 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:08:39.175033 1196445 ssh_runner.go:195] Run: systemctl --version
	I0704 01:08:39.348341 1196445 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0704 01:08:39.352559 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0704 01:08:39.378748 1196445 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0704 01:08:39.378866 1196445 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%!p(MISSING), " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0704 01:08:39.409039 1196445 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0704 01:08:39.409077 1196445 start.go:495] detecting cgroup driver to use...
	I0704 01:08:39.409110 1196445 detect.go:196] detected "cgroupfs" cgroup driver on host os
	I0704 01:08:39.409164 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0704 01:08:39.422321 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0704 01:08:39.434248 1196445 docker.go:217] disabling cri-docker service (if available) ...
	I0704 01:08:39.434344 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0704 01:08:39.448530 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0704 01:08:39.463578 1196445 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0704 01:08:39.552252 1196445 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0704 01:08:39.641041 1196445 docker.go:233] disabling docker service ...
	I0704 01:08:39.641108 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0704 01:08:39.660807 1196445 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0704 01:08:39.673868 1196445 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0704 01:08:39.763520 1196445 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0704 01:08:39.851995 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0704 01:08:39.863472 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %!s(MISSING) "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0704 01:08:39.880705 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.9"|' /etc/containerd/config.toml"
	I0704 01:08:39.890751 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0704 01:08:39.901368 1196445 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0704 01:08:39.901467 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0704 01:08:39.911715 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.921830 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0704 01:08:39.931688 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 01:08:39.941303 1196445 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0704 01:08:39.950585 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0704 01:08:39.960278 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0704 01:08:39.972286 1196445 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0704 01:08:39.982106 1196445 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0704 01:08:39.991352 1196445 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0704 01:08:39.999917 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.099343 1196445 ssh_runner.go:195] Run: sudo systemctl restart containerd
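	Taken together, the sed edits above should leave the following effective settings in /etc/containerd/config.toml before the restart; a grep from the host would confirm them (expected values reconstructed from the commands, not captured from the node):
	
		docker exec addons-155517 grep -E \
		  'sandbox_image|restrict_oom_score_adj|SystemdCgroup|conf_dir|enable_unprivileged_ports' \
		  /etc/containerd/config.toml
		# sandbox_image = "registry.k8s.io/pause:3.9"
		# restrict_oom_score_adj = false
		# SystemdCgroup = false            (cgroupfs driver, matching the host)
		# conf_dir = "/etc/cni/net.d"
		# enable_unprivileged_ports = true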
	I0704 01:08:40.243998 1196445 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0704 01:08:40.244098 1196445 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0704 01:08:40.247881 1196445 start.go:563] Will wait 60s for crictl version
	I0704 01:08:40.247950 1196445 ssh_runner.go:195] Run: which crictl
	I0704 01:08:40.251327 1196445 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0704 01:08:40.296521 1196445 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.18
	RuntimeApiVersion:  v1
	I0704 01:08:40.296607 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.318696 1196445 ssh_runner.go:195] Run: containerd --version
	I0704 01:08:40.343842 1196445 out.go:177] * Preparing Kubernetes v1.30.2 on containerd 1.7.18 ...
	I0704 01:08:40.345454 1196445 cli_runner.go:164] Run: docker network inspect addons-155517 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 01:08:40.361140 1196445 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0704 01:08:40.364944 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.376129 1196445 kubeadm.go:877] updating cluster {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0704 01:08:40.376264 1196445 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:40.376340 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.412122 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.412146 1196445 containerd.go:534] Images already preloaded, skipping extraction
	I0704 01:08:40.412211 1196445 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 01:08:40.448869 1196445 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 01:08:40.448892 1196445 cache_images.go:84] Images are preloaded, skipping loading
	I0704 01:08:40.448900 1196445 kubeadm.go:928] updating node { 192.168.49.2 8443 v1.30.2 containerd true true} ...
	I0704 01:08:40.449001 1196445 kubeadm.go:940] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.30.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-155517 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0704 01:08:40.449071 1196445 ssh_runner.go:195] Run: sudo crictl info
	I0704 01:08:40.489171 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:08:40.489196 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:40.489206 1196445 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0704 01:08:40.489228 1196445 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.30.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-155517 NodeName:addons-155517 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0704 01:08:40.489399 1196445 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-155517"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.30.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%!"(MISSING)
	  nodefs.inodesFree: "0%!"(MISSING)
	  imagefs.available: "0%!"(MISSING)
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0704 01:08:40.489469 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.30.2
	I0704 01:08:40.498054 1196445 binaries.go:44] Found k8s binaries, skipping transfer
	I0704 01:08:40.498164 1196445 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0704 01:08:40.506560 1196445 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0704 01:08:40.524624 1196445 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0704 01:08:40.542714 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
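	With the kubelet unit, its drop-in, and the kubeadm config now written to the node, they could be inspected or validated by hand before kubeadm runs (a hypothetical check; the recorded run proceeds directly):
	
		docker exec addons-155517 systemctl cat kubelet
		docker exec addons-155517 /var/lib/minikube/binaries/v1.30.2/kubeadm config validate \
		  --config /var/tmp/minikube/kubeadm.yaml.new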
	I0704 01:08:40.560914 1196445 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0704 01:08:40.564512 1196445 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 01:08:40.575356 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:08:40.663366 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:08:40.678704 1196445 certs.go:68] Setting up /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517 for IP: 192.168.49.2
	I0704 01:08:40.678774 1196445 certs.go:194] generating shared ca certs ...
	I0704 01:08:40.678804 1196445 certs.go:226] acquiring lock for ca certs: {Name:mk4f0dbc18506f7ee4fcbc10f124348dd208ffc0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:40.678969 1196445 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key
	I0704 01:08:41.197423 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt ...
	I0704 01:08:41.197454 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt: {Name:mkb28c983e13ee826bf585de68c8dd48b64194c2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197647 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key ...
	I0704 01:08:41.197660 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key: {Name:mk0f96934eb2f8ea5b78e7bab1383e47ca4c47bf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.197743 1196445 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key
	I0704 01:08:41.716569 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt ...
	I0704 01:08:41.716608 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt: {Name:mk49decf76e004afe576981c44baf46e246e42aa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.716807 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key ...
	I0704 01:08:41.716820 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key: {Name:mkfa598677724bedcdda29a6fc68fc0dff6ee016 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:41.717561 1196445 certs.go:256] generating profile certs ...
	I0704 01:08:41.717627 1196445 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key
	I0704 01:08:41.717645 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt with IP's: []
	I0704 01:08:42.174023 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt ...
	I0704 01:08:42.174061 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: {Name:mkc35913a7eb1db2825718f6dc2b65e7745aa5c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174290 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key ...
	I0704 01:08:42.174304 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.key: {Name:mkfadadfe13d73634e029f31163c920115daacae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.174396 1196445 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5
	I0704 01:08:42.174417 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0704 01:08:42.578819 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 ...
	I0704 01:08:42.578850 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5: {Name:mk7ecc68d7a90c858109ff8e83b26bc005452b6f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.579635 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 ...
	I0704 01:08:42.579664 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5: {Name:mk9daa218eca8496a54e034db67849e3cbe7a05c Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.580299 1196445 certs.go:381] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt
	I0704 01:08:42.580398 1196445 certs.go:385] copying /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key.545f2af5 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key
	I0704 01:08:42.580459 1196445 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key
	I0704 01:08:42.580484 1196445 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt with IP's: []
	I0704 01:08:42.984339 1196445 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt ...
	I0704 01:08:42.984371 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt: {Name:mk9654e604aa4538cb254b142d3cacdd3534a634 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984566 1196445 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key ...
	I0704 01:08:42.984584 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key: {Name:mkcae6cd9331f55e5fdb956fdd3942c8d213675d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:08:42.984770 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem (1679 bytes)
	I0704 01:08:42.984813 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem (1078 bytes)
	I0704 01:08:42.984842 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem (1123 bytes)
	I0704 01:08:42.984870 1196445 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem (1675 bytes)
	I0704 01:08:42.985445 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0704 01:08:43.011174 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1671 bytes)
	I0704 01:08:43.035588 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0704 01:08:43.061964 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0704 01:08:43.086326 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0704 01:08:43.110608 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0704 01:08:43.134625 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0704 01:08:43.157943 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0704 01:08:43.181717 1196445 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0704 01:08:43.206975 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0704 01:08:43.224889 1196445 ssh_runner.go:195] Run: openssl version
	I0704 01:08:43.230415 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0704 01:08:43.239834 1196445 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243271 1196445 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Jul  4 01:08 /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.243394 1196445 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0704 01:08:43.250079 1196445 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
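The symlink target name b5213941.0 follows OpenSSL's subject-hash lookup convention: TLS clients resolve a CA by hashing its subject and looking for <hash>.0 under /etc/ssl/certs. The hash is exactly what the `openssl x509 -hash` call two lines up prints; a sketch of the same derivation by hand:

	# Derive the subject hash and create the lookup symlink OpenSSL expects.
	hash=$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)
	sudo ln -fs /etc/ssl/certs/minikubeCA.pem "/etc/ssl/certs/${hash}.0"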
	I0704 01:08:43.259384 1196445 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0704 01:08:43.262831 1196445 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0704 01:08:43.262919 1196445 kubeadm.go:391] StartCluster: {Name:addons-155517 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:addons-155517 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:43.263009 1196445 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0704 01:08:43.263066 1196445 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0704 01:08:43.303591 1196445 cri.go:89] found id: ""
	I0704 01:08:43.303662 1196445 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0704 01:08:43.312342 1196445 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0704 01:08:43.321279 1196445 kubeadm.go:213] ignoring SystemVerification for kubeadm because of docker driver
	I0704 01:08:43.321369 1196445 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0704 01:08:43.329958 1196445 kubeadm.go:154] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0704 01:08:43.330017 1196445 kubeadm.go:156] found existing configuration files:
	
	I0704 01:08:43.330077 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0704 01:08:43.338479 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0704 01:08:43.338539 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0704 01:08:43.346581 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0704 01:08:43.355511 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0704 01:08:43.355614 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0704 01:08:43.363887 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.372345 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0704 01:08:43.372436 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0704 01:08:43.380824 1196445 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0704 01:08:43.389491 1196445 kubeadm.go:162] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0704 01:08:43.389581 1196445 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0704 01:08:43.398028 1196445 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.30.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0704 01:08:43.442669 1196445 kubeadm.go:309] [init] Using Kubernetes version: v1.30.2
	I0704 01:08:43.442729 1196445 kubeadm.go:309] [preflight] Running pre-flight checks
	I0704 01:08:43.482986 1196445 kubeadm.go:309] [preflight] The system verification failed. Printing the output from the verification:
	I0704 01:08:43.483062 1196445 kubeadm.go:309] KERNEL_VERSION: 5.15.0-1064-aws
	I0704 01:08:43.483101 1196445 kubeadm.go:309] OS: Linux
	I0704 01:08:43.483149 1196445 kubeadm.go:309] CGROUPS_CPU: enabled
	I0704 01:08:43.483199 1196445 kubeadm.go:309] CGROUPS_CPUACCT: enabled
	I0704 01:08:43.483248 1196445 kubeadm.go:309] CGROUPS_CPUSET: enabled
	I0704 01:08:43.483314 1196445 kubeadm.go:309] CGROUPS_DEVICES: enabled
	I0704 01:08:43.483364 1196445 kubeadm.go:309] CGROUPS_FREEZER: enabled
	I0704 01:08:43.483415 1196445 kubeadm.go:309] CGROUPS_MEMORY: enabled
	I0704 01:08:43.483465 1196445 kubeadm.go:309] CGROUPS_PIDS: enabled
	I0704 01:08:43.483531 1196445 kubeadm.go:309] CGROUPS_HUGETLB: enabled
	I0704 01:08:43.483582 1196445 kubeadm.go:309] CGROUPS_BLKIO: enabled
	I0704 01:08:43.557602 1196445 kubeadm.go:309] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0704 01:08:43.557712 1196445 kubeadm.go:309] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0704 01:08:43.557806 1196445 kubeadm.go:309] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0704 01:08:43.820944 1196445 kubeadm.go:309] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0704 01:08:43.824525 1196445 out.go:204]   - Generating certificates and keys ...
	I0704 01:08:43.824630 1196445 kubeadm.go:309] [certs] Using existing ca certificate authority
	I0704 01:08:43.824724 1196445 kubeadm.go:309] [certs] Using existing apiserver certificate and key on disk
	I0704 01:08:44.229464 1196445 kubeadm.go:309] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0704 01:08:44.869561 1196445 kubeadm.go:309] [certs] Generating "front-proxy-ca" certificate and key
	I0704 01:08:44.995121 1196445 kubeadm.go:309] [certs] Generating "front-proxy-client" certificate and key
	I0704 01:08:45.945765 1196445 kubeadm.go:309] [certs] Generating "etcd/ca" certificate and key
	I0704 01:08:46.426406 1196445 kubeadm.go:309] [certs] Generating "etcd/server" certificate and key
	I0704 01:08:46.426727 1196445 kubeadm.go:309] [certs] etcd/server serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.568753 1196445 kubeadm.go:309] [certs] Generating "etcd/peer" certificate and key
	I0704 01:08:46.569065 1196445 kubeadm.go:309] [certs] etcd/peer serving cert is signed for DNS names [addons-155517 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0704 01:08:46.824637 1196445 kubeadm.go:309] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0704 01:08:47.178188 1196445 kubeadm.go:309] [certs] Generating "apiserver-etcd-client" certificate and key
	I0704 01:08:47.938770 1196445 kubeadm.go:309] [certs] Generating "sa" key and public key
	I0704 01:08:47.939012 1196445 kubeadm.go:309] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0704 01:08:48.532992 1196445 kubeadm.go:309] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0704 01:08:48.700658 1196445 kubeadm.go:309] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0704 01:08:49.343140 1196445 kubeadm.go:309] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0704 01:08:50.301229 1196445 kubeadm.go:309] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0704 01:08:50.970394 1196445 kubeadm.go:309] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0704 01:08:50.971331 1196445 kubeadm.go:309] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0704 01:08:50.976216 1196445 kubeadm.go:309] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0704 01:08:50.978409 1196445 out.go:204]   - Booting up control plane ...
	I0704 01:08:50.978513 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0704 01:08:50.978594 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0704 01:08:50.979204 1196445 kubeadm.go:309] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0704 01:08:50.989631 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0704 01:08:50.991151 1196445 kubeadm.go:309] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0704 01:08:50.991199 1196445 kubeadm.go:309] [kubelet-start] Starting the kubelet
	I0704 01:08:51.101564 1196445 kubeadm.go:309] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0704 01:08:51.101665 1196445 kubeadm.go:309] [kubelet-check] Waiting for a healthy kubelet. This can take up to 4m0s
	I0704 01:08:52.602456 1196445 kubeadm.go:309] [kubelet-check] The kubelet is healthy after 1.501248538s
	I0704 01:08:52.602548 1196445 kubeadm.go:309] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0704 01:08:59.104335 1196445 kubeadm.go:309] [api-check] The API server is healthy after 6.50185707s
	I0704 01:08:59.129403 1196445 kubeadm.go:309] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0704 01:08:59.143665 1196445 kubeadm.go:309] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0704 01:08:59.169656 1196445 kubeadm.go:309] [upload-certs] Skipping phase. Please see --upload-certs
	I0704 01:08:59.169845 1196445 kubeadm.go:309] [mark-control-plane] Marking the node addons-155517 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0704 01:08:59.181202 1196445 kubeadm.go:309] [bootstrap-token] Using token: 7nkvt2.ozp2nts9dnthdvog
	I0704 01:08:59.182924 1196445 out.go:204]   - Configuring RBAC rules ...
	I0704 01:08:59.183052 1196445 kubeadm.go:309] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0704 01:08:59.189973 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0704 01:08:59.198030 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0704 01:08:59.202188 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0704 01:08:59.205935 1196445 kubeadm.go:309] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0704 01:08:59.209870 1196445 kubeadm.go:309] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0704 01:08:59.517980 1196445 kubeadm.go:309] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0704 01:08:59.966077 1196445 kubeadm.go:309] [addons] Applied essential addon: CoreDNS
	I0704 01:09:00.515905 1196445 kubeadm.go:309] [addons] Applied essential addon: kube-proxy
	I0704 01:09:00.516982 1196445 kubeadm.go:309] 
	I0704 01:09:00.517083 1196445 kubeadm.go:309] Your Kubernetes control-plane has initialized successfully!
	I0704 01:09:00.517098 1196445 kubeadm.go:309] 
	I0704 01:09:00.517185 1196445 kubeadm.go:309] To start using your cluster, you need to run the following as a regular user:
	I0704 01:09:00.517190 1196445 kubeadm.go:309] 
	I0704 01:09:00.517215 1196445 kubeadm.go:309]   mkdir -p $HOME/.kube
	I0704 01:09:00.517272 1196445 kubeadm.go:309]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0704 01:09:00.517328 1196445 kubeadm.go:309]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0704 01:09:00.517334 1196445 kubeadm.go:309] 
	I0704 01:09:00.517385 1196445 kubeadm.go:309] Alternatively, if you are the root user, you can run:
	I0704 01:09:00.517390 1196445 kubeadm.go:309] 
	I0704 01:09:00.517436 1196445 kubeadm.go:309]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0704 01:09:00.517442 1196445 kubeadm.go:309] 
	I0704 01:09:00.517492 1196445 kubeadm.go:309] You should now deploy a pod network to the cluster.
	I0704 01:09:00.517564 1196445 kubeadm.go:309] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0704 01:09:00.517632 1196445 kubeadm.go:309]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0704 01:09:00.517637 1196445 kubeadm.go:309] 
	I0704 01:09:00.517719 1196445 kubeadm.go:309] You can now join any number of control-plane nodes by copying certificate authorities
	I0704 01:09:00.517792 1196445 kubeadm.go:309] and service account keys on each node and then running the following as root:
	I0704 01:09:00.517797 1196445 kubeadm.go:309] 
	I0704 01:09:00.517884 1196445 kubeadm.go:309]   kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.517985 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa \
	I0704 01:09:00.518011 1196445 kubeadm.go:309] 	--control-plane 
	I0704 01:09:00.518017 1196445 kubeadm.go:309] 
	I0704 01:09:00.518099 1196445 kubeadm.go:309] Then you can join any number of worker nodes by running the following on each as root:
	I0704 01:09:00.518104 1196445 kubeadm.go:309] 
	I0704 01:09:00.518183 1196445 kubeadm.go:309] kubeadm join control-plane.minikube.internal:8443 --token 7nkvt2.ozp2nts9dnthdvog \
	I0704 01:09:00.518281 1196445 kubeadm.go:309] 	--discovery-token-ca-cert-hash sha256:5b6b816aa61ec76ffa7acb157372c74648707423ad3df4db41b9bf88dbe1edfa 
	I0704 01:09:00.520643 1196445 kubeadm.go:309] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1064-aws\n", err: exit status 1
	I0704 01:09:00.520759 1196445 kubeadm.go:309] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0704 01:09:00.520778 1196445 cni.go:84] Creating CNI manager for ""
	I0704 01:09:00.520793 1196445 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:09:00.523121 1196445 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0704 01:09:00.525213 1196445 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0704 01:09:00.530132 1196445 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.30.2/kubectl ...
	I0704 01:09:00.530152 1196445 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2438 bytes)
	I0704 01:09:00.549314 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0704 01:09:00.814623 1196445 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0704 01:09:00.814727 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:00.814782 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-155517 minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700 minikube.k8s.io/version=v1.33.1 minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8 minikube.k8s.io/name=addons-155517 minikube.k8s.io/primary=true
	I0704 01:09:00.972427 1196445 ops.go:34] apiserver oom_adj: -16
	I0704 01:09:00.972641 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.472756 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:01.972695 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.472885 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:02.973558 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.473435 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:03.973670 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.473303 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:04.973640 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.473357 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:05.973493 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.473432 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:06.972686 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.472874 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:07.973677 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.472770 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:08.973060 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.473021 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:09.972794 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.472869 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:10.973078 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.473672 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:11.972769 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.473575 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:12.972752 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.473123 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:13.973407 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.472957 1196445 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.30.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0704 01:09:14.571895 1196445 kubeadm.go:1107] duration metric: took 13.757267057s to wait for elevateKubeSystemPrivileges
	W0704 01:09:14.571932 1196445 kubeadm.go:286] apiserver tunnel failed: apiserver port not set
	I0704 01:09:14.571940 1196445 kubeadm.go:393] duration metric: took 31.309027137s to StartCluster
	I0704 01:09:14.571956 1196445 settings.go:142] acquiring lock: {Name:mk6d49b718ddc65478a80e50434df6064c31eee4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572073 1196445 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:09:14.572478 1196445 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 01:09:14.572667 1196445 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 01:09:14.572807 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0704 01:09:14.573070 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.573109 1196445 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0704 01:09:14.573210 1196445 addons.go:69] Setting yakd=true in profile "addons-155517"
	I0704 01:09:14.573236 1196445 addons.go:234] Setting addon yakd=true in "addons-155517"
	I0704 01:09:14.573263 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.573732 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574149 1196445 addons.go:69] Setting metrics-server=true in profile "addons-155517"
	I0704 01:09:14.574180 1196445 addons.go:234] Setting addon metrics-server=true in "addons-155517"
	I0704 01:09:14.574206 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574253 1196445 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-155517"
	I0704 01:09:14.574275 1196445 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-155517"
	I0704 01:09:14.574297 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.574634 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.574730 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.577258 1196445 addons.go:69] Setting registry=true in profile "addons-155517"
	I0704 01:09:14.577394 1196445 addons.go:234] Setting addon registry=true in "addons-155517"
	I0704 01:09:14.577449 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.579535 1196445 addons.go:69] Setting storage-provisioner=true in profile "addons-155517"
	I0704 01:09:14.579602 1196445 addons.go:234] Setting addon storage-provisioner=true in "addons-155517"
	I0704 01:09:14.579640 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.580179 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580252 1196445 addons.go:69] Setting cloud-spanner=true in profile "addons-155517"
	I0704 01:09:14.580300 1196445 addons.go:234] Setting addon cloud-spanner=true in "addons-155517"
	I0704 01:09:14.591703 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.592237 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580425 1196445 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-155517"
	I0704 01:09:14.597313 1196445 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:14.597353 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.597793 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580434 1196445 addons.go:69] Setting default-storageclass=true in profile "addons-155517"
	I0704 01:09:14.598119 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-155517"
	I0704 01:09:14.598447 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580438 1196445 addons.go:69] Setting gcp-auth=true in profile "addons-155517"
	I0704 01:09:14.618488 1196445 mustload.go:65] Loading cluster: addons-155517
	I0704 01:09:14.618673 1196445 config.go:182] Loaded profile config "addons-155517": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:09:14.618933 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580441 1196445 addons.go:69] Setting ingress=true in profile "addons-155517"
	I0704 01:09:14.633852 1196445 addons.go:234] Setting addon ingress=true in "addons-155517"
	I0704 01:09:14.633911 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.634367 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580444 1196445 addons.go:69] Setting ingress-dns=true in profile "addons-155517"
	I0704 01:09:14.659280 1196445 addons.go:234] Setting addon ingress-dns=true in "addons-155517"
	I0704 01:09:14.659332 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.659837 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.580448 1196445 addons.go:69] Setting inspektor-gadget=true in profile "addons-155517"
	I0704 01:09:14.683730 1196445 addons.go:234] Setting addon inspektor-gadget=true in "addons-155517"
	I0704 01:09:14.683777 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.684227 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.698704 1196445 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.15.1
	I0704 01:09:14.701274 1196445 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.17
	I0704 01:09:14.580702 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.719181 1196445 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0704 01:09:14.580712 1196445 out.go:177] * Verifying Kubernetes components...
	I0704 01:09:14.721032 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0704 01:09:14.721051 1196445 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0704 01:09:14.721122 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
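The --format argument in the inspect call above is a Go template that digs the host port mapped to the container's 22/tcp out of docker inspect's JSON; minikube then dials SSH on 127.0.0.1 at that port (33941 in the sshutil lines further down). Run standalone, the same lookup looks like:

	# Extract the host port docker mapped to the container's SSH port.
	docker container inspect addons-155517 \
	  --format '{{ (index (index .NetworkSettings.Ports "22/tcp") 0).HostPort }}'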
	I0704 01:09:14.580911 1196445 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-155517"
	I0704 01:09:14.580918 1196445 addons.go:69] Setting volcano=true in profile "addons-155517"
	I0704 01:09:14.723014 1196445 addons.go:234] Setting addon volcano=true in "addons-155517"
	I0704 01:09:14.580922 1196445 addons.go:69] Setting volumesnapshots=true in profile "addons-155517"
	I0704 01:09:14.723103 1196445 addons.go:234] Setting addon volumesnapshots=true in "addons-155517"
	I0704 01:09:14.723131 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.723415 1196445 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:14.723429 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0704 01:09:14.723497 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.729712 1196445 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-155517"
	I0704 01:09:14.730053 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.760655 1196445 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 01:09:14.760879 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.761414 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.767033 1196445 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:14.767054 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0704 01:09:14.767114 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.808186 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.808661 1196445 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.1
	I0704 01:09:14.811351 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0704 01:09:14.811385 1196445 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0704 01:09:14.811464 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.828255 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0704 01:09:14.830324 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.854527 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.2
	I0704 01:09:14.854766 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:14.855575 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0704 01:09:14.855680 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.858854 1196445 addons.go:234] Setting addon default-storageclass=true in "addons-155517"
	I0704 01:09:14.858894 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.859317 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:14.876557 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:14.884711 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0704 01:09:14.887689 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:14.887713 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0704 01:09:14.887776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.907776 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:14.921547 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0704 01:09:14.946618 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.10.1
	I0704 01:09:14.952851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:14.958655 1196445 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:14.958677 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0704 01:09:14.958757 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:14.989105 1196445 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
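The sed pipeline above rewrites the CoreDNS Corefile in place: it inserts a hosts block (mapping host.minikube.internal to the docker gateway 192.168.49.1, with fallthrough) just before the forward directive, adds a log directive before errors, and replaces the ConfigMap via kubectl. A hedged way to confirm the patch landed, assuming a working kubeconfig for this cluster:

	# Inspect the patched Corefile; the injected hosts block should appear
	# just before `forward . /etc/resolv.conf`.
	kubectl -n kube-system get configmap coredns -o yaml | grep -B1 -A3 'hosts {'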
	I0704 01:09:14.998292 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0704 01:09:15.008457 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.010183 1196445 out.go:177]   - Using image docker.io/registry:2.8.3
	I0704 01:09:15.011903 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0704 01:09:15.016345 1196445 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-155517"
	I0704 01:09:15.016398 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:15.016856 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:15.017248 1196445 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.30.0
	I0704 01:09:15.026296 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0704 01:09:15.032293 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0704 01:09:15.033528 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.034311 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0704 01:09:15.036144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0704 01:09:15.036176 1196445 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0704 01:09:15.036273 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.040125 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0704 01:09:15.042262 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0704 01:09:15.042363 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0704 01:09:15.044439 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0704 01:09:15.044460 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0704 01:09:15.044547 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.052699 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0704 01:09:15.052768 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (798 bytes)
	I0704 01:09:15.052855 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.066354 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0704 01:09:15.068850 1196445 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0704 01:09:15.075588 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0704 01:09:15.075620 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0704 01:09:15.075727 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.088431 1196445 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0704 01:09:15.114822 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.115791 1196445 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:15.115824 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (442770 bytes)
	I0704 01:09:15.115909 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.121303 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.123349 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.160899 1196445 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:15.160920 1196445 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0704 01:09:15.160989 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.162700 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.243792 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.254669 1196445 out.go:177]   - Using image docker.io/busybox:stable
	I0704 01:09:15.256265 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257205 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.257463 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.259664 1196445 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0704 01:09:15.261691 1196445 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.261712 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0704 01:09:15.261776 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:15.265635 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	W0704 01:09:15.288056 1196445 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0704 01:09:15.288087 1196445 retry.go:31] will retry after 347.093048ms: ssh: handshake failed: EOF
	I0704 01:09:15.307720 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.310433 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:15.322864 1196445 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 01:09:15.588350 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0704 01:09:15.588385 1196445 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0704 01:09:15.621674 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0704 01:09:15.621748 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0704 01:09:15.749570 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0704 01:09:15.749601 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0704 01:09:15.934643 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0704 01:09:15.971372 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0704 01:09:16.022036 1196445 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.022108 1196445 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0704 01:09:16.110981 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0704 01:09:16.111009 1196445 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0704 01:09:16.121017 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0704 01:09:16.121043 1196445 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0704 01:09:16.190720 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 01:09:16.201668 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0704 01:09:16.213599 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0704 01:09:16.238689 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0704 01:09:16.238718 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0704 01:09:16.244065 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0704 01:09:16.263426 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0704 01:09:16.295999 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0704 01:09:16.296072 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0704 01:09:16.307564 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 01:09:16.362987 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0704 01:09:16.383918 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0704 01:09:16.383945 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0704 01:09:16.394076 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0704 01:09:16.394102 1196445 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0704 01:09:16.417925 1196445 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.417949 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0704 01:09:16.422065 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0704 01:09:16.422088 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0704 01:09:16.522801 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0704 01:09:16.522826 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0704 01:09:16.575145 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0704 01:09:16.575171 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0704 01:09:16.653193 1196445 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.653217 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0704 01:09:16.659744 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0704 01:09:16.659770 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0704 01:09:16.661739 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0704 01:09:16.744662 1196445 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0704 01:09:16.744688 1196445 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0704 01:09:16.821862 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0704 01:09:16.821888 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0704 01:09:16.881062 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0704 01:09:16.901455 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0704 01:09:16.901481 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0704 01:09:16.928474 1196445 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.30.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.939327122s)
	I0704 01:09:16.928503 1196445 start.go:967] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
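	Note: the pipeline above edits CoreDNS's Corefile in place, inserting a hosts block before the forward directive and a log directive before errors. A minimal sketch of the resulting stanza, assuming the stock minikube Corefile layout (unchanged directives abbreviated with "..."):
	
		.:53 {
		    log
		    errors
		    ...
		    hosts {
		       192.168.49.1 host.minikube.internal
		       fallthrough
		    }
		    forward . /etc/resolv.conf
		    ...
		}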
	I0704 01:09:16.929502 1196445 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.606615135s)
	I0704 01:09:16.930207 1196445 node_ready.go:35] waiting up to 6m0s for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937817 1196445 node_ready.go:49] node "addons-155517" has status "Ready":"True"
	I0704 01:09:16.937845 1196445 node_ready.go:38] duration metric: took 7.604392ms for node "addons-155517" to be "Ready" ...
	I0704 01:09:16.937855 1196445 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods, including those with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler], to be "Ready" ...
	I0704 01:09:16.962597 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:17.098912 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0704 01:09:17.098985 1196445 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0704 01:09:17.131539 1196445 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.131609 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0704 01:09:17.201528 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0704 01:09:17.201552 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0704 01:09:17.281563 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0704 01:09:17.281585 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0704 01:09:17.383540 1196445 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0704 01:09:17.383612 1196445 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0704 01:09:17.431613 1196445 kapi.go:248] "coredns" deployment in "kube-system" namespace and "addons-155517" context rescaled to 1 replicas
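	For reference, the rescale is performed through the Kubernetes API rather than the CLI; a roughly equivalent hand-run command would be:
	
		kubectl --context addons-155517 -n kube-system scale deployment coredns --replicas=1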
	I0704 01:09:17.491254 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:17.630881 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0704 01:09:17.630958 1196445 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0704 01:09:17.673774 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0704 01:09:17.673845 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0704 01:09:17.747064 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0704 01:09:17.747141 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0704 01:09:17.825561 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0704 01:09:17.825630 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0704 01:09:17.839617 1196445 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:17.839687 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0704 01:09:18.021499 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0704 01:09:18.021581 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0704 01:09:18.066416 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0704 01:09:18.103144 1196445 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.103227 1196445 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0704 01:09:18.206551 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0704 01:09:18.972681 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:19.481076 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (3.546393824s)
	I0704 01:09:19.481267 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (3.509817533s)
	I0704 01:09:19.707411 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.516628882s)
	I0704 01:09:21.018313 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:22.094516 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0704 01:09:22.094602 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.120851 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:22.498250 1196445 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0704 01:09:22.672494 1196445 addons.go:234] Setting addon gcp-auth=true in "addons-155517"
	I0704 01:09:22.672553 1196445 host.go:66] Checking if "addons-155517" exists ...
	I0704 01:09:22.672999 1196445 cli_runner.go:164] Run: docker container inspect addons-155517 --format={{.State.Status}}
	I0704 01:09:22.693321 1196445 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0704 01:09:22.693374 1196445 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-155517
	I0704 01:09:22.731686 1196445 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33941 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/addons-155517/id_rsa Username:docker}
	I0704 01:09:23.219625 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (7.017917581s)
	I0704 01:09:23.219660 1196445 addons.go:475] Verifying addon ingress=true in "addons-155517"
	I0704 01:09:23.219801 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (7.006176358s)
	I0704 01:09:23.223048 1196445 out.go:177] * Verifying ingress addon...
	I0704 01:09:23.225816 1196445 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0704 01:09:23.233823 1196445 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0704 01:09:23.233848 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
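	The ingress verification polls pods labelled app.kubernetes.io/name=ingress-nginx until they leave Pending; a hand-run equivalent (selector and timeout illustrative) is:
	
		kubectl --context addons-155517 -n ingress-nginx wait pod \
		  -l app.kubernetes.io/component=controller --for=condition=Ready --timeout=90s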
	I0704 01:09:23.469592 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:23.733369 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.252870 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:24.738392 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.197716 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (8.95361222s)
	I0704 01:09:25.197838 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (8.934389448s)
	I0704 01:09:25.197945 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.890302325s)
	I0704 01:09:25.197981 1196445 addons.go:475] Verifying addon metrics-server=true in "addons-155517"
	I0704 01:09:25.198051 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (8.835024851s)
	I0704 01:09:25.198102 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.536339738s)
	I0704 01:09:25.198137 1196445 addons.go:475] Verifying addon registry=true in "addons-155517"
	I0704 01:09:25.198304 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.317214693s)
	I0704 01:09:25.198380 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.707056177s)
	W0704 01:09:25.199807 1196445 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0704 01:09:25.199830 1196445 retry.go:31] will retry after 346.398491ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
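	This failure is a CRD establishment race: the VolumeSnapshotClass object is applied in the same kubectl invocation that creates its CRD, before the API server has registered the new kind, hence "ensure CRDs are installed first". The retry (and the --force re-apply below) absorbs it; outside a retry loop, the conventional fix is to apply the CRDs first and wait for them to be established, e.g.:
	
		kubectl apply -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
		kubectl wait --for condition=established --timeout=60s \
		  crd/volumesnapshotclasses.snapshot.storage.k8s.io
		kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml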
	I0704 01:09:25.198438 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.131941092s)
	I0704 01:09:25.201793 1196445 out.go:177] * Verifying registry addon...
	I0704 01:09:25.201794 1196445 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-155517 service yakd-dashboard -n yakd-dashboard
	
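	The service helper resolves a reachable URL for the Service's NodePort; to print the URL non-interactively instead of opening it, the same command takes --url:
	
		minikube -p addons-155517 service yakd-dashboard -n yakd-dashboard --url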
	I0704 01:09:25.204942 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0704 01:09:25.222617 1196445 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0704 01:09:25.222690 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.251428 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.492382 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:25.546710 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0704 01:09:25.735322 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:25.736772 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:25.911009 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.704355252s)
	I0704 01:09:25.911047 1196445 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-155517"
	I0704 01:09:25.911278 1196445 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.217934924s)
	I0704 01:09:25.914361 1196445 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.1
	I0704 01:09:25.914416 1196445 out.go:177] * Verifying csi-hostpath-driver addon...
	I0704 01:09:25.916249 1196445 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0704 01:09:25.917135 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0704 01:09:25.918627 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0704 01:09:25.918653 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0704 01:09:25.925376 1196445 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0704 01:09:25.925401 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:25.957453 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0704 01:09:25.957479 1196445 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0704 01:09:25.985833 1196445 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:25.985857 1196445 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0704 01:09:26.010928 1196445 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0704 01:09:26.210069 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.232731 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.424161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:26.710162 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:26.731168 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:26.923170 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.210427 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.230934 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.250410 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (1.703595614s)
	I0704 01:09:27.250521 1196445 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.23956747s)
	I0704 01:09:27.253393 1196445 addons.go:475] Verifying addon gcp-auth=true in "addons-155517"
	I0704 01:09:27.257553 1196445 out.go:177] * Verifying gcp-auth addon...
	I0704 01:09:27.260382 1196445 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0704 01:09:27.262998 1196445 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:09:27.424027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.710878 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:27.730370 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:27.923796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:27.969295 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:28.211580 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.231431 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.422656 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:28.711283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:28.730044 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:28.924689 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.211847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.231572 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.425562 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:29.710139 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:29.731027 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:29.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.210230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.231858 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.424018 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:30.469196 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:30.709718 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:30.730263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:30.922825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.219839 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.241048 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.425597 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:31.712545 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:31.731465 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:31.922640 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.210366 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.230954 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.422551 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.710061 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:32.730153 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:32.925535 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:32.968875 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:33.209869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.230720 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.423400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:33.710614 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:33.730154 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:33.923184 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.211208 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.230331 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.422808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.710269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:34.730201 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:34.923588 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:34.969882 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:35.210283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.230873 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.422763 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:35.709465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:35.730270 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:35.923232 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.209479 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.229850 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.423197 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:36.710129 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:36.730676 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:36.922710 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.209525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.230493 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.424298 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:37.468432 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:37.710143 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:37.730780 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:37.922415 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.209676 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.230719 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.422303 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:38.710124 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:38.730126 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:38.922826 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.210085 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.230519 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.423189 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:39.468806 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:39.710065 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:39.730167 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:39.923743 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.209304 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.422186 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:40.710013 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:40.730219 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:40.923123 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.211576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.237098 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.423541 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:41.469660 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:41.710859 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:41.731203 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:41.923394 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.210467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.231587 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.423307 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:42.709772 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:42.729958 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:42.923400 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.210064 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.230380 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.423544 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.709869 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:43.730334 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:43.925613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:43.976063 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:44.216521 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.233500 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.423984 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:44.709890 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:44.730491 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:44.922620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.218269 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.232042 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:45.709814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:45.730412 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:45.924578 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.210332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.231072 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.424267 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:46.482456 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:46.709782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:46.730358 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:46.925109 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.210160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.230642 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.422979 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:47.710273 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:47.730748 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:47.937531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.210020 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.230451 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.423194 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.709898 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:48.732172 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:48.923290 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:48.969889 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:49.212283 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.230721 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.423247 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:49.710414 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:49.730459 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:49.922796 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.210560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.233100 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.423363 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:50.710514 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:50.733774 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:50.922714 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.211285 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.423332 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:51.472262 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:51.710160 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:51.730450 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:51.925154 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.231397 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.423658 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:52.710851 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:52.730326 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:52.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.212484 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.232166 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.424097 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.710891 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:53.731740 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:53.924019 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:53.968656 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:54.211161 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.232059 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.423259 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:54.712488 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:54.731251 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:54.924094 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.210510 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.231539 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.422966 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.711791 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:55.733623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:55.930765 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:55.969815 1196445 pod_ready.go:102] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"False"
	I0704 01:09:56.210040 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.230481 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.423900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:56.717782 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:56.733051 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:56.924352 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.210192 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.231623 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.423375 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:57.712974 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:57.739221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:57.924312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.210188 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.231084 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.428301 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:58.469048 1196445 pod_ready.go:92] pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.469072 1196445 pod_ready.go:81] duration metric: took 41.506375249s for pod "coredns-7db6d8ff4d-5x2l7" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.469083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.470959 1196445 pod_ready.go:97] error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
	I0704 01:09:58.470985 1196445 pod_ready.go:81] duration metric: took 1.892793ms for pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace to be "Ready" ...
	E0704 01:09:58.470996 1196445 pod_ready.go:66] WaitExtra: waitPodCondition: error getting pod "coredns-7db6d8ff4d-kd68p" in "kube-system" namespace (skipping!): pods "coredns-7db6d8ff4d-kd68p" not found
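	This error is expected fallout from the earlier rescale of the coredns deployment to one replica: the second pod was deleted, so the readiness wait on it short-circuits and is skipped. A quick hand check that a single replica remains:
	
		kubectl --context addons-155517 -n kube-system get pods -l k8s-app=kube-dns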
	I0704 01:09:58.471003 1196445 pod_ready.go:78] waiting up to 6m0s for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476539 1196445 pod_ready.go:92] pod "etcd-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.476565 1196445 pod_ready.go:81] duration metric: took 5.554425ms for pod "etcd-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.476580 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482116 1196445 pod_ready.go:92] pod "kube-apiserver-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.482141 1196445 pod_ready.go:81] duration metric: took 5.552694ms for pod "kube-apiserver-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.482153 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488450 1196445 pod_ready.go:92] pod "kube-controller-manager-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.488476 1196445 pod_ready.go:81] duration metric: took 6.314983ms for pod "kube-controller-manager-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.488488 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667045 1196445 pod_ready.go:92] pod "kube-proxy-62r6j" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:58.667072 1196445 pod_ready.go:81] duration metric: took 178.576255ms for pod "kube-proxy-62r6j" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.667083 1196445 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:58.709864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:58.730495 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:58.923133 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.067974 1196445 pod_ready.go:92] pod "kube-scheduler-addons-155517" in "kube-system" namespace has status "Ready":"True"
	I0704 01:09:59.068046 1196445 pod_ready.go:81] duration metric: took 400.954049ms for pod "kube-scheduler-addons-155517" in "kube-system" namespace to be "Ready" ...
	I0704 01:09:59.068072 1196445 pod_ready.go:38] duration metric: took 42.130204788s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
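The pod_ready.go lines above are a per-pod polling wait on the PodReady condition, with NotFound treated as a skip (hence the "(skipping!)" entries for coredns-7db6d8ff4d-kd68p, a replica that was scaled away). A minimal sketch of that pattern, assuming a pre-built client-go Clientset and an illustrative 500ms poll interval; the package and function names here are hypothetical, not minikube's actual implementation:

    package waitutil

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	apierrors "k8s.io/apimachinery/pkg/api/errors"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // waitPodReady polls a single pod until its Ready condition is True,
    // returning early (without error) if the pod no longer exists.
    func waitPodReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for time.Now().Before(deadline) {
    		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
    		if apierrors.IsNotFound(err) {
    			return nil // pod was deleted: skip it, as the log above does
    		}
    		if err == nil {
    			for _, c := range pod.Status.Conditions {
    				if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
    					return nil
    				}
    			}
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	return fmt.Errorf("pod %s/%s not Ready within %s", ns, name, timeout)
    }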
	I0704 01:09:59.068115 1196445 api_server.go:52] waiting for apiserver process to appear ...
	I0704 01:09:59.068215 1196445 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 01:09:59.083368 1196445 api_server.go:72] duration metric: took 44.510664696s to wait for apiserver process to appear ...
	I0704 01:09:59.083466 1196445 api_server.go:88] waiting for apiserver healthz status ...
	I0704 01:09:59.083524 1196445 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0704 01:09:59.092379 1196445 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0704 01:09:59.093665 1196445 api_server.go:141] control plane version: v1.30.2
	I0704 01:09:59.093689 1196445 api_server.go:131] duration metric: took 10.180723ms to wait for apiserver health ...
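The healthz wait just completed is a plain HTTPS GET against the apiserver until it answers 200 with body "ok". A standalone sketch of that check, using the endpoint from the log; certificate verification is skipped here purely for illustration (minikube's real client trusts the cluster CA), and the one-minute budget and 500ms interval are assumptions:

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"io"
    	"net/http"
    	"time"
    )

    func main() {
    	client := &http.Client{
    		Timeout:   5 * time.Second,
    		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
    	}
    	deadline := time.Now().Add(time.Minute)
    	for time.Now().Before(deadline) {
    		resp, err := client.Get("https://192.168.49.2:8443/healthz")
    		if err == nil {
    			body, _ := io.ReadAll(resp.Body)
    			resp.Body.Close()
    			// Matches the `returned 200: ok` line in the log above.
    			if resp.StatusCode == http.StatusOK && string(body) == "ok" {
    				fmt.Println("apiserver healthy")
    				return
    			}
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	fmt.Println("apiserver healthz never returned ok")
    }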
	I0704 01:09:59.093697 1196445 system_pods.go:43] waiting for kube-system pods to appear ...
	I0704 01:09:59.210881 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.230670 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.277516 1196445 system_pods.go:59] 18 kube-system pods found
	I0704 01:09:59.277591 1196445 system_pods.go:61] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.277614 1196445 system_pods.go:61] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.277638 1196445 system_pods.go:61] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.277674 1196445 system_pods.go:61] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.277700 1196445 system_pods.go:61] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.277723 1196445 system_pods.go:61] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.277743 1196445 system_pods.go:61] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.277782 1196445 system_pods.go:61] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.277811 1196445 system_pods.go:61] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.277834 1196445 system_pods.go:61] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.277855 1196445 system_pods.go:61] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.277890 1196445 system_pods.go:61] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.277919 1196445 system_pods.go:61] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.277940 1196445 system_pods.go:61] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.277965 1196445 system_pods.go:61] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.278000 1196445 system_pods.go:61] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.278027 1196445 system_pods.go:61] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.278049 1196445 system_pods.go:61] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.278072 1196445 system_pods.go:74] duration metric: took 184.367763ms to wait for pod list to return data ...
	I0704 01:09:59.278106 1196445 default_sa.go:34] waiting for default service account to be created ...
	I0704 01:09:59.423626 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:09:59.467163 1196445 default_sa.go:45] found service account: "default"
	I0704 01:09:59.467239 1196445 default_sa.go:55] duration metric: took 189.106033ms for default service account to be created ...
	I0704 01:09:59.467265 1196445 system_pods.go:116] waiting for k8s-apps to be running ...
	I0704 01:09:59.676387 1196445 system_pods.go:86] 18 kube-system pods found
	I0704 01:09:59.676469 1196445 system_pods.go:89] "coredns-7db6d8ff4d-5x2l7" [6344a526-e705-4a50-9a44-66c0d35a0ca8] Running
	I0704 01:09:59.676493 1196445 system_pods.go:89] "csi-hostpath-attacher-0" [e4b634d1-4641-4cde-bcdc-b5e48be74e6b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
	I0704 01:09:59.676519 1196445 system_pods.go:89] "csi-hostpath-resizer-0" [4f1a6b44-70cb-43a2-bec7-e0213f06ffd3] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
	I0704 01:09:59.676561 1196445 system_pods.go:89] "csi-hostpathplugin-bwns5" [a5928f29-2395-4b5b-b09f-baae6183f4ff] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0704 01:09:59.676582 1196445 system_pods.go:89] "etcd-addons-155517" [119c13b1-a5b8-434b-bc29-7f32f6a9ed2b] Running
	I0704 01:09:59.676604 1196445 system_pods.go:89] "kindnet-7qr8x" [c5fbe50b-fa8d-4022-8aa3-bbbca5f27060] Running
	I0704 01:09:59.676642 1196445 system_pods.go:89] "kube-apiserver-addons-155517" [c515e441-ac73-42b4-9e34-b5d4b684a9a4] Running
	I0704 01:09:59.676667 1196445 system_pods.go:89] "kube-controller-manager-addons-155517" [c9334f1b-c695-4aec-aa4d-b853d9bf214c] Running
	I0704 01:09:59.676692 1196445 system_pods.go:89] "kube-ingress-dns-minikube" [b7e45300-1c0f-463c-914d-3febc516e196] Running / Ready:ContainersNotReady (containers with unready status: [minikube-ingress-dns]) / ContainersReady:ContainersNotReady (containers with unready status: [minikube-ingress-dns])
	I0704 01:09:59.676714 1196445 system_pods.go:89] "kube-proxy-62r6j" [4818ac60-abd9-4281-90e2-df11f62e8455] Running
	I0704 01:09:59.676748 1196445 system_pods.go:89] "kube-scheduler-addons-155517" [1802b842-df6a-4872-b790-17ac5e8ad808] Running
	I0704 01:09:59.677464 1196445 system_pods.go:89] "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0704 01:09:59.677489 1196445 system_pods.go:89] "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Pending / Ready:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr]) / ContainersReady:ContainersNotReady (containers with unready status: [nvidia-device-plugin-ctr])
	I0704 01:09:59.677516 1196445 system_pods.go:89] "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
	I0704 01:09:59.677555 1196445 system_pods.go:89] "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0704 01:09:59.677573 1196445 system_pods.go:89] "snapshot-controller-745499f584-k4nhl" [2536525a-d8b5-4b7d-85e5-2a92611f9623] Running
	I0704 01:09:59.677594 1196445 system_pods.go:89] "snapshot-controller-745499f584-sc4kq" [1a35e6e8-e90a-4d20-972a-3158fa6b4d10] Running
	I0704 01:09:59.677626 1196445 system_pods.go:89] "storage-provisioner" [12fb9780-61c0-4ea8-9b4b-e054c37b7af8] Running
	I0704 01:09:59.677655 1196445 system_pods.go:126] duration metric: took 210.368801ms to wait for k8s-apps to be running ...
	I0704 01:09:59.677675 1196445 system_svc.go:44] waiting for kubelet service to be running ....
	I0704 01:09:59.677762 1196445 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:09:59.696237 1196445 system_svc.go:56] duration metric: took 18.553501ms WaitForService to wait for kubelet
	I0704 01:09:59.696315 1196445 kubeadm.go:576] duration metric: took 45.12361654s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 01:09:59.696350 1196445 node_conditions.go:102] verifying NodePressure condition ...
	I0704 01:09:59.714261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:09:59.736009 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:09:59.867078 1196445 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0704 01:09:59.867113 1196445 node_conditions.go:123] node cpu capacity is 2
	I0704 01:09:59.867127 1196445 node_conditions.go:105] duration metric: took 170.755162ms to run NodePressure ...
	I0704 01:09:59.867149 1196445 start.go:241] waiting for startup goroutines ...
	I0704 01:09:59.923389 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.218625 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.241517 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.438305 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:00.710808 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:00.730534 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:00.926440 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.210272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.232796 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.429002 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:01.710041 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:01.730869 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:01.924722 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.210497 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.231671 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.438549 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:02.710555 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:02.735794 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:02.924538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.210613 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.231512 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.424944 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:03.711675 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:03.733187 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:03.929527 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.209595 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.230995 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.427465 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:04.710195 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:04.731186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:04.924972 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.211604 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.231389 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.423431 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:05.711411 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:05.734962 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:05.923586 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.210512 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.231004 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.423318 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:06.711560 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:06.730836 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:06.938452 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.210046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.230933 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.425316 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:07.710091 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:07.730834 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:07.922615 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.211590 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.230434 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.423538 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:08.710046 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:08.733862 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:08.924256 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.209864 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.230148 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.423756 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:09.710673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:09.731199 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:09.925117 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.212967 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.232598 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.424087 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:10.713769 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:10.730323 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:10.923900 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.210573 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.231728 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.425825 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:11.709221 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:11.730266 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:11.923382 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.209500 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.230865 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.422855 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:12.709768 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:12.731470 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:12.923005 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.209436 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.230328 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.422933 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:13.709576 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:13.730924 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:13.922558 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.210379 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.230443 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.423308 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:14.710027 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:14.730518 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:14.922814 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.211531 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.230606 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.423172 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:15.711224 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:15.730357 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:15.923854 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.214100 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.236362 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.422969 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:16.709735 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:16.730186 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:16.924010 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.209861 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.230221 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.429620 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:17.711216 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:17.731288 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:17.923272 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.210800 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.230675 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.423721 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:18.709748 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:18.730911 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:18.924515 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.210261 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.424525 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:19.709993 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:19.732993 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:19.924083 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.210003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.230897 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.423785 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:20.711057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:20.730952 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:20.924157 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.210312 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.230586 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.423795 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:21.710239 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:21.732286 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:21.926006 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.210368 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0704 01:10:22.230949 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.423210 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:22.709928 1196445 kapi.go:107] duration metric: took 57.504984289s to wait for kubernetes.io/minikube-addons=registry ...
	I0704 01:10:22.731279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:22.923850 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.230884 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.423811 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:23.730105 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:23.922926 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.229930 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.423940 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:24.733279 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:24.923057 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.232439 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.423165 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:25.731393 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:25.924116 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.230888 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.423217 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:26.730398 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:26.922554 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.231244 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.427320 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:27.732045 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:27.924673 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.231378 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.423152 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:28.730530 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:28.922455 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.231220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.423963 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:29.731263 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:29.923847 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.234736 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.424146 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:30.730525 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:30.923960 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.230545 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.422889 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:31.730356 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:31.922685 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.230220 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.423403 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:32.731689 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:32.923390 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.230844 1196445 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0704 01:10:33.424895 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:33.734207 1196445 kapi.go:107] duration metric: took 1m10.508389549s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0704 01:10:33.927331 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.424003 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:34.924230 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.422420 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0704 01:10:35.922720 1196445 kapi.go:107] duration metric: took 1m10.005581993s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0704 01:10:50.263958 1196445 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0704 01:10:50.263985 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:50.764249 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.264467 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:51.764892 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.264523 1196445 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0704 01:10:52.764246 1196445 kapi.go:107] duration metric: took 1m25.503859259s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0704 01:10:52.766320 1196445 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-155517 cluster.
	I0704 01:10:52.767887 1196445 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0704 01:10:52.769442 1196445 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0704 01:10:52.771171 1196445 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner-rancher, storage-provisioner, default-storageclass, volcano, cloud-spanner, metrics-server, ingress-dns, inspektor-gadget, yakd, volumesnapshots, registry, ingress, csi-hostpath-driver, gcp-auth
	I0704 01:10:52.772773 1196445 addons.go:510] duration metric: took 1m38.199659863s for enable addons: enabled=[nvidia-device-plugin storage-provisioner-rancher storage-provisioner default-storageclass volcano cloud-spanner metrics-server ingress-dns inspektor-gadget yakd volumesnapshots registry ingress csi-hostpath-driver gcp-auth]
	I0704 01:10:52.772819 1196445 start.go:246] waiting for cluster config update ...
	I0704 01:10:52.772852 1196445 start.go:255] writing updated cluster config ...
	I0704 01:10:52.773191 1196445 ssh_runner.go:195] Run: rm -f paused
	I0704 01:10:53.108848 1196445 start.go:600] kubectl: 1.30.2, cluster: 1.30.2 (minor skew: 0)
	I0704 01:10:53.111322 1196445 out.go:177] * Done! kubectl is now configured to use "addons-155517" cluster and "default" namespace by default
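In retrospect, the long runs of kapi.go:96 lines above are one loop per addon label selector (kubernetes.io/minikube-addons=registry, app.kubernetes.io/name=ingress-nginx, kubernetes.io/minikube-addons=csi-hostpath-driver, kubernetes.io/minikube-addons=gcp-auth), each listing pods by selector roughly every 500ms until every match is Running. A minimal sketch of that loop under the same assumptions as the earlier waitPodReady sketch (hypothetical package, pre-built Clientset, illustrative interval):

    package waitutil

    import (
    	"context"
    	"fmt"
    	"time"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // waitSelectorRunning lists pods by label selector until at least one
    // exists and every match reports phase Running.
    func waitSelectorRunning(ctx context.Context, cs *kubernetes.Clientset, ns, selector string, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for time.Now().Before(deadline) {
    		pods, err := cs.CoreV1().Pods(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
    		if err == nil && len(pods.Items) > 0 {
    			allRunning := true
    			for _, p := range pods.Items {
    				if p.Status.Phase != corev1.PodRunning {
    					allRunning = false // still Pending, as in the log lines above
    					break
    				}
    			}
    			if allRunning {
    				return nil
    			}
    		}
    		time.Sleep(500 * time.Millisecond)
    	}
    	return fmt.Errorf("pods %q in %q not Running within %s", selector, ns, timeout)
    }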
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	43dd36d5979cb       1499ed4fbd0aa       About a minute ago   Exited              minikube-ingress-dns      5                   e7a0f4dc02aff       kube-ingress-dns-minikube
	1e595060faf3e       6cb7dcc2008fa       3 minutes ago        Running             headlamp                  0                   2bdbce3d0506c       headlamp-7867546754-xmxdd
	176e35400e6b4       6ef582f3ec844       3 minutes ago        Running             gcp-auth                  0                   ef5df83821e38       gcp-auth-5db96cd9b4-s44p2
	368ff31908f31       adf677c1adaa5       3 minutes ago        Running             controller                0                   68ecc323fb44a       ingress-nginx-controller-768f948f8f-rqqqs
	671aa72a09a1e       8b46b1cd48760       4 minutes ago        Running             admission                 0                   85a1b65a34b3a       volcano-admission-5f7844f7bc-kv4hh
	3ea1321729282       296b5f799fcd8       4 minutes ago        Exited              patch                     2                   8383cc7b690d2       ingress-nginx-admission-patch-4kk74
	bcb962151c498       1505f556b3a7b       4 minutes ago        Running             volcano-controllers       0                   969b0063a7566       volcano-controllers-59cb4746db-b7bd8
	591fbae21d6c8       d1ca868ab82aa       4 minutes ago        Running             gadget                    2                   3d0e4ad5c6381       gadget-9pgwd
	e0f993f36de9c       d9c7ad4c226bf       4 minutes ago        Running             volcano-scheduler         0                   60ad9646728c0       volcano-scheduler-844f6db89b-bwtk7
	0e4215cbc9d2f       95dccb4df54ab       4 minutes ago        Running             metrics-server            0                   737497a744646       metrics-server-c59844bb4-csqts
	b600d65f7bbdc       77bdba588b953       4 minutes ago        Running             yakd                      0                   c7305ce05f616       yakd-dashboard-799879c74f-gdj2b
	2ba7cf4a30a86       296b5f799fcd8       4 minutes ago        Exited              create                    0                   9bbb6e04b6a06       ingress-nginx-admission-create-b7mmt
	57beab3f7e83e       2437cf7621777       4 minutes ago        Running             coredns                   0                   c1dabb0588375       coredns-7db6d8ff4d-5x2l7
	ae2716d367698       d1ca868ab82aa       4 minutes ago        Exited              gadget                    1                   3d0e4ad5c6381       gadget-9pgwd
	33899ba7d5911       ba04bb24b9575       5 minutes ago        Running             storage-provisioner       0                   b9a422421a145       storage-provisioner
	67cab13d85edc       89d73d416b992       5 minutes ago        Running             kindnet-cni               0                   d962f3914dbda       kindnet-7qr8x
	8a696cf44b3b1       66dbb96a9149f       5 minutes ago        Running             kube-proxy                0                   e274a6aef2e08       kube-proxy-62r6j
	d26768fcef3f9       e1dcc3400d3ea       5 minutes ago        Running             kube-controller-manager   0                   fad3a49558544       kube-controller-manager-addons-155517
	566fe4aca8adb       c7dd04b1bafeb       5 minutes ago        Running             kube-scheduler            0                   55af540582842       kube-scheduler-addons-155517
	d5678b588829b       84c601f3f72c8       5 minutes ago        Running             kube-apiserver            0                   6b54dc456af60       kube-apiserver-addons-155517
	f517c28ada419       014faa467e297       5 minutes ago        Running             etcd                      0                   edf3cfc1bc506       etcd-addons-155517
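Worth noting in the table above: kube-ingress-dns-minikube is Exited on attempt 5, created about a minute before this dump, which is consistent with the nslookup timeout that failed TestAddons/parallel/Ingress. One quick way to surface such restart loops, sketched as another addition to the hypothetical waitutil package with the same Clientset assumption:

    package waitutil

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // printRestartCounts surfaces crash-looping kube-system containers by
    // printing every container whose restart count is non-zero.
    func printRestartCounts(ctx context.Context, cs *kubernetes.Clientset) error {
    	pods, err := cs.CoreV1().Pods("kube-system").List(ctx, metav1.ListOptions{})
    	if err != nil {
    		return err
    	}
    	for _, p := range pods.Items {
    		for _, st := range p.Status.ContainerStatuses {
    			if st.RestartCount > 0 {
    				fmt.Printf("%s/%s restarts=%d\n", p.Name, st.Name, st.RestartCount)
    			}
    		}
    	}
    	return nil
    }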
	
	
	==> containerd <==
	Jul 04 01:13:51 addons-155517 containerd[812]: time="2024-07-04T01:13:51.954118171Z" level=error msg="ttrpc: received message on inactive stream" stream=7187
	Jul 04 01:13:51 addons-155517 containerd[812]: time="2024-07-04T01:13:51.955557787Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"fae3910a7f71ee67b3899bc6e873bf6c7f031b691b406af629dde1ca041b8770\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:13:51 addons-155517 containerd[812]: time="2024-07-04T01:13:51.966197615Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"dbf3c30e0131f13d35d9cebc2d6b59c422aba598407f5aa534ee1459fba435de\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:13:51 addons-155517 containerd[812]: time="2024-07-04T01:13:51.977063580Z" level=error msg="ttrpc: received message on inactive stream" stream=7243
	Jul 04 01:13:51 addons-155517 containerd[812]: time="2024-07-04T01:13:51.977681241Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"473010c2e2e366bd353df65d79f825406412d765408c6981a527648ef11f8b48\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:13:56 addons-155517 containerd[812]: time="2024-07-04T01:13:56.952967429Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"0616b875994993289990adf66114d410f6e63f5ea8054266312d3bc5a042c2aa\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:13:56 addons-155517 containerd[812]: time="2024-07-04T01:13:56.965137825Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"3bafec2af7fd71e7ae08f95562d446214cefdad01e6b00d9b9be3eb99acca3ff\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:13:56 addons-155517 containerd[812]: time="2024-07-04T01:13:56.975195405Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"75c1c117f2fe9439d2ecc920f497a59d99c09f51b1f598a8e8b47de122d69a9d\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:01 addons-155517 containerd[812]: time="2024-07-04T01:14:01.958931127Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"f0d5dcf9020ccf3bfcde46bff6f5e02e5ad96698f331f999f2c7d800cab562d3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:01 addons-155517 containerd[812]: time="2024-07-04T01:14:01.976475585Z" level=error msg="ttrpc: received message on inactive stream" stream=7383
	Jul 04 01:14:01 addons-155517 containerd[812]: time="2024-07-04T01:14:01.977269915Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"71be4c725f35f551b6440267c15095bc603ec9be226a7b6bb8ed0c16284ed289\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:01 addons-155517 containerd[812]: time="2024-07-04T01:14:01.992891182Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"dc3cc6f9520e8748dad0e3b8a425b5e5a6bf4a4dd506d206a435d58918f5bad5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:06 addons-155517 containerd[812]: time="2024-07-04T01:14:06.953919644Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"75fdd0b6ed224c35ed0f9dca5f583690c6008531b102b05de502b348ceff3761\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:06 addons-155517 containerd[812]: time="2024-07-04T01:14:06.964891568Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"4b67d2c6c06505c4fec720789417c1232a474d98478ce53a758eff74d48ba48c\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:06 addons-155517 containerd[812]: time="2024-07-04T01:14:06.975004195Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"f2f37d9b08fde4ae4a2496914674962f1c2c58938a0f0a5605e0873e0e6768cb\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:11 addons-155517 containerd[812]: time="2024-07-04T01:14:11.952917947Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"25c148ffc029dbe201c9dd7de03ce925eb99f700ebf153ca98f4103b16f942ee\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:11 addons-155517 containerd[812]: time="2024-07-04T01:14:11.965068470Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"ce8b348563b028bee450bbc393c6c29369a8e0495d4601fa8c2f604983c066ee\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:11 addons-155517 containerd[812]: time="2024-07-04T01:14:11.984800155Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"3745c496c86d9cd76163a5ef24ab9fc6cb6357d974edb13238717564da18774a\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:16 addons-155517 containerd[812]: time="2024-07-04T01:14:16.952436624Z" level=error msg="ttrpc: received message on inactive stream" stream=7615
	Jul 04 01:14:16 addons-155517 containerd[812]: time="2024-07-04T01:14:16.952967411Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"d25b50a3968c81e10b4d9e050f2fd22ac7fc7a146258107b7bf6212e40bb0f18\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:16 addons-155517 containerd[812]: time="2024-07-04T01:14:16.963204663Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"8041c7c67d9719a46d30e977ba67b2481924dba2d12d01df50b559b5ffe57254\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:16 addons-155517 containerd[812]: time="2024-07-04T01:14:16.974080063Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"963349482a98c4e27fceb4de1e6d3128101eefd0cf7cd61657db3e377da3efb3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:21 addons-155517 containerd[812]: time="2024-07-04T01:14:21.962746390Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"6aae312871acfc86842bfabec3da74b8a315f376c4851b63de8ac9f9b38cbd82\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:21 addons-155517 containerd[812]: time="2024-07-04T01:14:21.976983039Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"464b92ce1eb80b1fe3a181a98d9d87225d19739b41f8559cd82cdcd0acb66e42\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Jul 04 01:14:21 addons-155517 containerd[812]: time="2024-07-04T01:14:21.990434227Z" level=error msg="ExecSync for \"591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222\" failed" error="failed to exec in container: failed to start exec \"15ef3e861adab8f29a90d796d4aa7e718a2df457957cfb2add89eb97d14288bf\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
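The ExecSync failures above repeat on a roughly 5-second cadence against container 591fbae21d6c8 (the gadget container from the status table), which is the signature of a kubelet exec probe firing while its target process is down between restarts: kubelet implements exec probes as a CRI ExecSync every PeriodSeconds, and containerd rejects the exec once the container has stopped. For orientation only, a hypothetical exec-style liveness probe in client-go types; the command and thresholds are assumptions, not the gadget DaemonSet's actual probe:

    package waitutil

    import corev1 "k8s.io/api/core/v1"

    // exampleExecProbe models an exec liveness probe; each firing becomes a
    // CRI ExecSync, which fails with "cannot exec in a stopped container"
    // whenever the container process has already exited.
    func exampleExecProbe() *corev1.Probe {
    	return &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			Exec: &corev1.ExecAction{Command: []string{"/health-check"}}, // hypothetical command
    		},
    		PeriodSeconds:    5, // matches the ~5s cadence of the errors above
    		FailureThreshold: 3,
    	}
    }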
	
	
	==> coredns [57beab3f7e83e507ce7e6fb884cd0d41c8e35bd5a2316cbae1f3e08b24e70f6c] <==
	[INFO] 10.244.0.20:35305 - 1640 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002015416s
	[INFO] 10.244.0.20:58616 - 26989 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000222387s
	[INFO] 10.244.0.20:58616 - 4627 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000315202s
	[INFO] 10.244.0.20:59633 - 20803 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000102471s
	[INFO] 10.244.0.20:59633 - 1615 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000051782s
	[INFO] 10.244.0.20:34752 - 48017 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000062891s
	[INFO] 10.244.0.20:34752 - 7827 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000040179s
	[INFO] 10.244.0.20:58393 - 45445 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000097269s
	[INFO] 10.244.0.20:58393 - 57482 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000035561s
	[INFO] 10.244.0.20:55644 - 61451 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001379991s
	[INFO] 10.244.0.20:55644 - 47368 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00183288s
	[INFO] 10.244.0.20:48153 - 560 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000085578s
	[INFO] 10.244.0.20:48153 - 6198 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000247297s
	[INFO] 10.244.0.24:49790 - 19022 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.00020185s
	[INFO] 10.244.0.24:40296 - 40439 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000092002s
	[INFO] 10.244.0.24:55414 - 3419 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000280092s
	[INFO] 10.244.0.24:53343 - 47733 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.00031466s
	[INFO] 10.244.0.24:37486 - 20682 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000161999s
	[INFO] 10.244.0.24:53680 - 62293 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000100913s
	[INFO] 10.244.0.24:50638 - 63946 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002934748s
	[INFO] 10.244.0.24:60061 - 56417 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002707848s
	[INFO] 10.244.0.24:52158 - 55580 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 648 0.001257943s
	[INFO] 10.244.0.24:53012 - 57901 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002368688s
	[INFO] 10.244.0.26:59519 - 2 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000214272s
	[INFO] 10.244.0.26:40136 - 3 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.00086554s
	
	
	==> describe nodes <==
	Name:               addons-155517
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-155517
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8
	                    minikube.k8s.io/name=addons-155517
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_07_04T01_09_00_0700
	                    minikube.k8s.io/version=v1.33.1
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-155517
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Thu, 04 Jul 2024 01:08:57 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-155517
	  AcquireTime:     <unset>
	  RenewTime:       Thu, 04 Jul 2024 01:14:16 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:08:54 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Thu, 04 Jul 2024 01:12:34 +0000   Thu, 04 Jul 2024 01:09:10 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-155517
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	System Info:
	  Machine ID:                 9fd27c19cbdd40e797fc2e621404e195
	  System UUID:                ff208bb9-dbd1-4ea7-9a2d-001b90e3d2d4
	  Boot ID:                    8f650b57-d36f-4952-bd7f-5577bab5f375
	  Kernel Version:             5.15.0-1064-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.18
	  Kubelet Version:            v1.30.2
	  Kube-Proxy Version:         v1.30.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (18 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  gadget                      gadget-9pgwd                                 0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m1s
	  gcp-auth                    gcp-auth-5db96cd9b4-s44p2                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m32s
	  headlamp                    headlamp-7867546754-xmxdd                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m28s
	  ingress-nginx               ingress-nginx-controller-768f948f8f-rqqqs    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         4m59s
	  kube-system                 coredns-7db6d8ff4d-5x2l7                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     5m8s
	  kube-system                 etcd-addons-155517                           100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         5m22s
	  kube-system                 kindnet-7qr8x                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m8s
	  kube-system                 kube-apiserver-addons-155517                 250m (12%)    0 (0%)      0 (0%)           0 (0%)         5m22s
	  kube-system                 kube-controller-manager-addons-155517        200m (10%)    0 (0%)      0 (0%)           0 (0%)         5m22s
	  kube-system                 kube-ingress-dns-minikube                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m4s
	  kube-system                 kube-proxy-62r6j                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m8s
	  kube-system                 kube-scheduler-addons-155517                 100m (5%)     0 (0%)      0 (0%)           0 (0%)         5m22s
	  kube-system                 metrics-server-c59844bb4-csqts               100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         5m3s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m3s
	  volcano-system              volcano-admission-5f7844f7bc-kv4hh           0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m59s
	  volcano-system              volcano-controllers-59cb4746db-b7bd8         0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m58s
	  volcano-system              volcano-scheduler-844f6db89b-bwtk7           0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m58s
	  yakd-dashboard              yakd-dashboard-799879c74f-gdj2b              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     5m1s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             638Mi (8%)   476Mi (6%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age    From             Message
	  ----    ------                   ----   ----             -------
	  Normal  Starting                 5m6s   kube-proxy       
	  Normal  Starting                 5m23s  kubelet          Starting kubelet.
	  Normal  NodeHasSufficientMemory  5m23s  kubelet          Node addons-155517 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    5m23s  kubelet          Node addons-155517 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     5m23s  kubelet          Node addons-155517 status is now: NodeHasSufficientPID
	  Normal  NodeNotReady             5m23s  kubelet          Node addons-155517 status is now: NodeNotReady
	  Normal  NodeAllocatableEnforced  5m22s  kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                5m12s  kubelet          Node addons-155517 status is now: NodeReady
	  Normal  RegisteredNode           5m9s   node-controller  Node addons-155517 event: Registered Node addons-155517 in Controller
	
	
	==> dmesg <==
	[  +0.001017] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000030 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000893] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.001005] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +0.002621] FS-Cache: Duplicate cookie detected
	[  +0.000687] FS-Cache: O-cookie c=0000002a [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=000000001e9df579
	[  +0.001010] FS-Cache: O-key=[8] '1671ed0000000000'
	[  +0.000678] FS-Cache: N-cookie c=00000031 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000899] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000dd6a8763
	[  +0.001363] FS-Cache: N-key=[8] '1671ed0000000000'
	[  +2.399414] FS-Cache: Duplicate cookie detected
	[  +0.000668] FS-Cache: O-cookie c=00000028 [p=00000027 fl=226 nc=0 na=1]
	[  +0.000927] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=00000000807950c0
	[  +0.001002] FS-Cache: O-key=[8] '1571ed0000000000'
	[  +0.000681] FS-Cache: N-cookie c=00000033 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000944] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.000997] FS-Cache: N-key=[8] '1571ed0000000000'
	[  +0.414782] FS-Cache: Duplicate cookie detected
	[  +0.000671] FS-Cache: O-cookie c=0000002d [p=00000027 fl=226 nc=0 na=1]
	[  +0.000917] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=0000000007b93ca1
	[  +0.000982] FS-Cache: O-key=[8] '1b71ed0000000000'
	[  +0.000661] FS-Cache: N-cookie c=00000034 [p=00000027 fl=2 nc=0 na=1]
	[  +0.000895] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000f6098f89
	[  +0.000977] FS-Cache: N-key=[8] '1b71ed0000000000'
	
	
	==> etcd [f517c28ada41915907ef5e67d80504dca45673592d66bc4f85c35c8241aa8787] <==
	{"level":"info","ts":"2024-07-04T01:08:53.343057Z","caller":"embed/etcd.go:569","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-07-04T01:08:53.344289Z","caller":"etcdserver/server.go:744","msg":"started as single-node; fast-forwarding election ticks","local-member-id":"aec36adc501070cc","forward-ticks":9,"forward-duration":"900ms","election-ticks":10,"election-timeout":"1s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344386Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap.db","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344412Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/snap","suffix":"snap","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.344432Z","caller":"fileutil/purge.go:50","msg":"started to purge file","dir":"/var/lib/minikube/etcd/member/wal","suffix":"wal","max":5,"interval":"30s"}
	{"level":"info","ts":"2024-07-04T01:08:53.34541Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(12593026477526642892)"}
	{"level":"info","ts":"2024-07-04T01:08:53.345493Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-07-04T01:08:53.831529Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831678Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831724Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-07-04T01:08:53.831768Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831803Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831834Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.831875Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-07-04T01:08:53.833983Z","caller":"etcdserver/server.go:2578","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.834852Z","caller":"etcdserver/server.go:2068","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-155517 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-07-04T01:08:53.835607Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-07-04T01:08:53.835809Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836016Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836115Z","caller":"etcdserver/server.go:2602","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-07-04T01:08:53.836033Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-07-04T01:08:53.84114Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.856473Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-07-04T01:08:53.859549Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-07-04T01:08:53.860789Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> gcp-auth [176e35400e6b4d15467a19b19558445b7d1b5dbec42ad8098a036e029f6b3077] <==
	2024/07/04 01:10:52 GCP Auth Webhook started!
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:10:54 Ready to marshal response ...
	2024/07/04 01:10:54 Ready to write response ...
	2024/07/04 01:11:04 Ready to marshal response ...
	2024/07/04 01:11:04 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:19 Ready to marshal response ...
	2024/07/04 01:11:19 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:20 Ready to marshal response ...
	2024/07/04 01:11:20 Ready to write response ...
	2024/07/04 01:11:28 Ready to marshal response ...
	2024/07/04 01:11:28 Ready to write response ...
	2024/07/04 01:12:21 Ready to marshal response ...
	2024/07/04 01:12:21 Ready to write response ...
	2024/07/04 01:12:36 Ready to marshal response ...
	2024/07/04 01:12:36 Ready to write response ...
	
	
	==> kernel <==
	 01:14:22 up  6:56,  0 users,  load average: 0.84, 2.30, 3.08
	Linux addons-155517 5.15.0-1064-aws #70~20.04.1-Ubuntu SMP Thu Jun 27 14:52:48 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [67cab13d85edc9d5c0f37e6bf189f122a928a79d09306ee9a7e93dbc16acca46] <==
	I0704 01:12:16.128555       1 main.go:227] handling current node
	I0704 01:12:26.145149       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:12:26.145174       1 main.go:227] handling current node
	I0704 01:12:36.155553       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:12:36.155580       1 main.go:227] handling current node
	I0704 01:12:46.161496       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:12:46.161527       1 main.go:227] handling current node
	I0704 01:12:56.170991       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:12:56.171021       1 main.go:227] handling current node
	I0704 01:13:06.177920       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:06.177951       1 main.go:227] handling current node
	I0704 01:13:16.183077       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:16.183119       1 main.go:227] handling current node
	I0704 01:13:26.195234       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:26.195260       1 main.go:227] handling current node
	I0704 01:13:36.207113       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:36.207144       1 main.go:227] handling current node
	I0704 01:13:46.211555       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:46.211583       1 main.go:227] handling current node
	I0704 01:13:56.222734       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:13:56.222773       1 main.go:227] handling current node
	I0704 01:14:06.226686       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:06.226718       1 main.go:227] handling current node
	I0704 01:14:16.230045       1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
	I0704 01:14:16.230074       1 main.go:227] handling current node
	
	
	==> kube-apiserver [d5678b588829bae555d8757bb9d8a3f9d182137807bb650acee005f9f590f5d2] <==
	E0704 01:10:30.230867       1 dispatcher.go:214] failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.108.205.215:443: connect: connection refused
	W0704 01:10:50.119174       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.108.205.215:443: connect: connection refused
	E0704 01:10:50.119216       1 dispatcher.go:214] failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.108.205.215:443: connect: connection refused
	I0704 01:10:54.119844       1 alloc.go:330] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.102.249.245"}
	E0704 01:11:07.831246       1 watch.go:250] http2: stream closed
	I0704 01:11:20.626614       1 controller.go:615] quota admission added evaluator for: jobs.batch.volcano.sh
	I0704 01:11:20.663546       1 controller.go:615] quota admission added evaluator for: podgroups.scheduling.volcano.sh
	E0704 01:11:44.500768       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"local-path-provisioner-service-account\" not found]"
	I0704 01:12:33.267448       1 controller.go:615] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
	E0704 01:12:45.514392       1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"csi-hostpathplugin-sa\" not found]"
	I0704 01:12:52.070076       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.070121       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.111653       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.111699       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.123212       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.123255       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.144000       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.144039       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	I0704 01:12:52.193146       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
	I0704 01:12:52.193191       1 handler.go:286] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
	W0704 01:12:53.114139       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
	W0704 01:12:53.193357       1 cacher.go:168] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
	W0704 01:12:53.202807       1 cacher.go:168] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
	I0704 01:12:58.929384       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0704 01:12:59.966476       1 cacher.go:168] Terminating all watchers from cacher traces.gadget.kinvolk.io
	
	
	==> kube-controller-manager [d26768fcef3f904fbe4d8309b2336e0d0536a0636f241b26984323c589bd890e] <==
	E0704 01:13:29.960778       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:13:34.389025       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:13:34.389062       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:13:41.568521       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:41.717470       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:41.853353       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:42.019385       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:42.287264       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:42.561688       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:42.871022       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:43.321273       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:44.106188       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:45.531228       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:48.235417       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:13:53.492335       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	E0704 01:14:03.874801       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	W0704 01:14:06.338705       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:06.338750       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:14:16.691107       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:16.691151       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	W0704 01:14:17.365437       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:17.365477       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:18.603582       1 namespace_controller.go:159] deletion of namespace gadget failed: unexpected items still remain in namespace: gadget for gvr: /v1, Resource=pods
	W0704 01:14:20.463017       1 reflector.go:547] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0704 01:14:20.463055       1 reflector.go:150] k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	
	
	==> kube-proxy [8a696cf44b3b1b9718c7bc5215c5dd91d048a8496501845e4b243c3a46ba4f90] <==
	I0704 01:09:15.745880       1 server_linux.go:69] "Using iptables proxy"
	I0704 01:09:15.772902       1 server.go:1062] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	I0704 01:09:15.799195       1 server.go:659] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0704 01:09:15.799240       1 server_linux.go:165] "Using iptables Proxier"
	I0704 01:09:15.804393       1 server_linux.go:511] "Detect-local-mode set to ClusterCIDR, but no cluster CIDR for family" ipFamily="IPv6"
	I0704 01:09:15.804419       1 server_linux.go:528] "Defaulting to no-op detect-local"
	I0704 01:09:15.804446       1 proxier.go:243] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses"
	I0704 01:09:15.804673       1 server.go:872] "Version info" version="v1.30.2"
	I0704 01:09:15.804688       1 server.go:874] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0704 01:09:15.805805       1 config.go:192] "Starting service config controller"
	I0704 01:09:15.805822       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0704 01:09:15.805846       1 config.go:101] "Starting endpoint slice config controller"
	I0704 01:09:15.805850       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0704 01:09:15.808392       1 config.go:319] "Starting node config controller"
	I0704 01:09:15.808406       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0704 01:09:15.906774       1 shared_informer.go:320] Caches are synced for service config
	I0704 01:09:15.906727       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0704 01:09:15.908585       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [566fe4aca8adb536ad06b4727d9447ef68bd730ff7b9e8ddd94c6dfc6a8de11a] <==
	W0704 01:08:57.400228       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0704 01:08:57.400246       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	W0704 01:08:57.400636       1 reflector.go:547] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0704 01:08:57.400664       1 reflector.go:150] runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	W0704 01:08:57.400873       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.400898       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:57.401054       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0704 01:08:57.401076       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	W0704 01:08:57.401091       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0704 01:08:57.401108       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	W0704 01:08:57.401143       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0704 01:08:57.401163       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	W0704 01:08:57.401174       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:57.401182       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.239191       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0704 01:08:58.239466       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	W0704 01:08:58.379662       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0704 01:08:58.379764       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	W0704 01:08:58.415451       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0704 01:08:58.415596       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	W0704 01:08:58.433246       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0704 01:08:58.433288       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	W0704 01:08:58.520896       1 reflector.go:547] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0704 01:08:58.520940       1 reflector.go:150] k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	I0704 01:08:58.883633       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Jul 04 01:13:51 addons-155517 kubelet[1544]: E0704 01:13:51.977897    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"473010c2e2e366bd353df65d79f825406412d765408c6981a527648ef11f8b48\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:13:56 addons-155517 kubelet[1544]: E0704 01:13:56.953178    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"0616b875994993289990adf66114d410f6e63f5ea8054266312d3bc5a042c2aa\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:13:56 addons-155517 kubelet[1544]: E0704 01:13:56.965380    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"3bafec2af7fd71e7ae08f95562d446214cefdad01e6b00d9b9be3eb99acca3ff\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:13:56 addons-155517 kubelet[1544]: E0704 01:13:56.975527    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"75c1c117f2fe9439d2ecc920f497a59d99c09f51b1f598a8e8b47de122d69a9d\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:13:57 addons-155517 kubelet[1544]: I0704 01:13:57.862807    1544 scope.go:117] "RemoveContainer" containerID="43dd36d5979cbf4985344e107ccdd6489aba1101493e5a5e405c508ae9a5dada"
	Jul 04 01:13:57 addons-155517 kubelet[1544]: E0704 01:13:57.863073    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"minikube-ingress-dns\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=minikube-ingress-dns pod=kube-ingress-dns-minikube_kube-system(b7e45300-1c0f-463c-914d-3febc516e196)\"" pod="kube-system/kube-ingress-dns-minikube" podUID="b7e45300-1c0f-463c-914d-3febc516e196"
	Jul 04 01:14:01 addons-155517 kubelet[1544]: E0704 01:14:01.959199    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"f0d5dcf9020ccf3bfcde46bff6f5e02e5ad96698f331f999f2c7d800cab562d3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:01 addons-155517 kubelet[1544]: E0704 01:14:01.977530    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"71be4c725f35f551b6440267c15095bc603ec9be226a7b6bb8ed0c16284ed289\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:01 addons-155517 kubelet[1544]: E0704 01:14:01.993129    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"dc3cc6f9520e8748dad0e3b8a425b5e5a6bf4a4dd506d206a435d58918f5bad5\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:06 addons-155517 kubelet[1544]: E0704 01:14:06.954488    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"75fdd0b6ed224c35ed0f9dca5f583690c6008531b102b05de502b348ceff3761\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:06 addons-155517 kubelet[1544]: E0704 01:14:06.965131    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"4b67d2c6c06505c4fec720789417c1232a474d98478ce53a758eff74d48ba48c\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:06 addons-155517 kubelet[1544]: E0704 01:14:06.975246    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"f2f37d9b08fde4ae4a2496914674962f1c2c58938a0f0a5605e0873e0e6768cb\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:11 addons-155517 kubelet[1544]: I0704 01:14:11.863202    1544 scope.go:117] "RemoveContainer" containerID="43dd36d5979cbf4985344e107ccdd6489aba1101493e5a5e405c508ae9a5dada"
	Jul 04 01:14:11 addons-155517 kubelet[1544]: E0704 01:14:11.863537    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"minikube-ingress-dns\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=minikube-ingress-dns pod=kube-ingress-dns-minikube_kube-system(b7e45300-1c0f-463c-914d-3febc516e196)\"" pod="kube-system/kube-ingress-dns-minikube" podUID="b7e45300-1c0f-463c-914d-3febc516e196"
	Jul 04 01:14:11 addons-155517 kubelet[1544]: E0704 01:14:11.953590    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"25c148ffc029dbe201c9dd7de03ce925eb99f700ebf153ca98f4103b16f942ee\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:11 addons-155517 kubelet[1544]: E0704 01:14:11.965403    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"ce8b348563b028bee450bbc393c6c29369a8e0495d4601fa8c2f604983c066ee\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:11 addons-155517 kubelet[1544]: E0704 01:14:11.985118    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"3745c496c86d9cd76163a5ef24ab9fc6cb6357d974edb13238717564da18774a\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:16 addons-155517 kubelet[1544]: E0704 01:14:16.953223    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"d25b50a3968c81e10b4d9e050f2fd22ac7fc7a146258107b7bf6212e40bb0f18\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:16 addons-155517 kubelet[1544]: E0704 01:14:16.963454    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"8041c7c67d9719a46d30e977ba67b2481924dba2d12d01df50b559b5ffe57254\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:16 addons-155517 kubelet[1544]: E0704 01:14:16.974300    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"963349482a98c4e27fceb4de1e6d3128101eefd0cf7cd61657db3e377da3efb3\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:21 addons-155517 kubelet[1544]: E0704 01:14:21.963006    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"6aae312871acfc86842bfabec3da74b8a315f376c4851b63de8ac9f9b38cbd82\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:21 addons-155517 kubelet[1544]: E0704 01:14:21.977244    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"464b92ce1eb80b1fe3a181a98d9d87225d19739b41f8559cd82cdcd0acb66e42\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:21 addons-155517 kubelet[1544]: E0704 01:14:21.990703    1544 remote_runtime.go:496] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"15ef3e861adab8f29a90d796d4aa7e718a2df457957cfb2add89eb97d14288bf\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="591fbae21d6c821d8480538b5d10da6cffb16dc98b9ed1a283e104814cecc222" cmd=["/bin/gadgettracermanager","-liveness"]
	Jul 04 01:14:22 addons-155517 kubelet[1544]: I0704 01:14:22.862838    1544 scope.go:117] "RemoveContainer" containerID="43dd36d5979cbf4985344e107ccdd6489aba1101493e5a5e405c508ae9a5dada"
	Jul 04 01:14:22 addons-155517 kubelet[1544]: E0704 01:14:22.863110    1544 pod_workers.go:1298] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"minikube-ingress-dns\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=minikube-ingress-dns pod=kube-ingress-dns-minikube_kube-system(b7e45300-1c0f-463c-914d-3febc516e196)\"" pod="kube-system/kube-ingress-dns-minikube" podUID="b7e45300-1c0f-463c-914d-3febc516e196"
	
	
	==> storage-provisioner [33899ba7d59110e40a8409b27a7737ac3b4858d348229b496df42db6a119852b] <==
	I0704 01:09:20.859954       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0704 01:09:20.902425       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0704 01:09:20.902503       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0704 01:09:20.962139       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0704 01:09:20.962380       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	I0704 01:09:20.963772       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"58c57b6c-af21-4158-b30e-a900c384acaa", APIVersion:"v1", ResourceVersion:"568", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db became leader
	I0704 01:09:21.062827       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-155517_8b223e9f-4dc4-474e-ac36-2da123f117db!
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-155517 -n addons-155517
helpers_test.go:261: (dbg) Run:  kubectl --context addons-155517 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: ingress-nginx-admission-create-b7mmt ingress-nginx-admission-patch-4kk74 test-job-nginx-0
helpers_test.go:274: ======> post-mortem[TestAddons/parallel/Volcano]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context addons-155517 describe pod ingress-nginx-admission-create-b7mmt ingress-nginx-admission-patch-4kk74 test-job-nginx-0
helpers_test.go:277: (dbg) Non-zero exit: kubectl --context addons-155517 describe pod ingress-nginx-admission-create-b7mmt ingress-nginx-admission-patch-4kk74 test-job-nginx-0: exit status 1 (89.701342ms)

** stderr ** 
	Error from server (NotFound): pods "ingress-nginx-admission-create-b7mmt" not found
	Error from server (NotFound): pods "ingress-nginx-admission-patch-4kk74" not found
	Error from server (NotFound): pods "test-job-nginx-0" not found

** /stderr **
helpers_test.go:279: kubectl --context addons-155517 describe pod ingress-nginx-admission-create-b7mmt ingress-nginx-admission-patch-4kk74 test-job-nginx-0: exit status 1
--- FAIL: TestAddons/parallel/Volcano (199.63s)
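Note on the Volcano post-mortem above: the pod list and the describe run as two separate kubectl invocations, and all three pods (the two ingress-nginx admission jobs and test-job-nginx-0) were deleted in between, so the describe itself exits 1 with NotFound instead of adding detail. A minimal manual-triage sketch against the same profile (illustrative only, not part of the test suite; the podgroups resource name is taken from the apiserver log above):

	kubectl --context addons-155517 get pods -n volcano-system -o wide
	kubectl --context addons-155517 get podgroups.scheduling.volcano.sh -A
	kubectl --context addons-155517 get events -A --sort-by=.lastTimestamp | tail -n 20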

TestAddons/serial (0s)

=== RUN   TestAddons/serial
addons_test.go:164: Unable to run more tests (deadline exceeded)
--- FAIL: TestAddons/serial (0.00s)

TestAddons/StoppedEnableDisable (0s)

=== RUN   TestAddons/StoppedEnableDisable
addons_test.go:174: (dbg) Run:  out/minikube-linux-arm64 stop -p addons-155517
addons_test.go:174: (dbg) Non-zero exit: out/minikube-linux-arm64 stop -p addons-155517: context deadline exceeded (977ns)
addons_test.go:176: failed to stop minikube. args "out/minikube-linux-arm64 stop -p addons-155517" : context deadline exceeded
addons_test.go:178: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-155517
addons_test.go:178: (dbg) Non-zero exit: out/minikube-linux-arm64 addons enable dashboard -p addons-155517: context deadline exceeded (156ns)
addons_test.go:180: failed to enable dashboard addon: args "out/minikube-linux-arm64 addons enable dashboard -p addons-155517" : context deadline exceeded
addons_test.go:182: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-155517
addons_test.go:182: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable dashboard -p addons-155517: context deadline exceeded (132ns)
addons_test.go:184: failed to disable dashboard addon: args "out/minikube-linux-arm64 addons disable dashboard -p addons-155517" : context deadline exceeded
addons_test.go:187: (dbg) Run:  out/minikube-linux-arm64 addons disable gvisor -p addons-155517
addons_test.go:187: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable gvisor -p addons-155517: context deadline exceeded (132ns)
addons_test.go:189: failed to disable non-enabled addon: args "out/minikube-linux-arm64 addons disable gvisor -p addons-155517" : context deadline exceeded
--- FAIL: TestAddons/StoppedEnableDisable (0.00s)
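All four non-zero exits above report context deadline exceeded after nanoseconds (977ns, 156ns, 132ns, 132ns): the suite-wide deadline already noted in TestAddons/serial had expired, so each command failed before minikube ever ran. Re-running the same commands outside the harness (sketch, verbatim from the failed steps minus the expired test context) separates a harness-deadline artifact from a real stop/addon regression:

	out/minikube-linux-arm64 stop -p addons-155517
	out/minikube-linux-arm64 addons enable dashboard -p addons-155517
	out/minikube-linux-arm64 addons disable dashboard -p addons-155517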

TestFunctional/parallel/ImageCommands/ImageLoadDaemon (4.09s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadDaemon
functional_test.go:354: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
functional_test.go:354: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr: (3.822311414s)
functional_test.go:447: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls
functional_test.go:442: expected "gcr.io/google-containers/addon-resizer:functional-781779" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageLoadDaemon (4.09s)
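The load command itself exits 0 here; the failure is the follow-up `image ls` not finding the tag in the node's containerd store. A hand-repro sketch built from the same commands the suite runs (the pull/tag pair appears verbatim in ImageTagAndLoadDaemon below; only the final grep is added for illustration):

	docker pull gcr.io/google-containers/addon-resizer:1.8.9
	docker tag gcr.io/google-containers/addon-resizer:1.8.9 gcr.io/google-containers/addon-resizer:functional-781779
	out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
	out/minikube-linux-arm64 -p functional-781779 image ls | grep addon-resizer || echo "image not in node store"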

TestFunctional/parallel/ImageCommands/ImageReloadDaemon (4.76s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageReloadDaemon
functional_test.go:364: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
2024/07/04 01:52:36 [DEBUG] GET http://127.0.0.1:36195/api/v1/namespaces/kubernetes-dashboard/services/http:kubernetes-dashboard:/proxy/
functional_test.go:364: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr: (4.485655802s)
functional_test.go:447: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls
functional_test.go:442: expected "gcr.io/google-containers/addon-resizer:functional-781779" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageReloadDaemon (4.76s)

TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (5.09s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon
functional_test.go:234: (dbg) Run:  docker pull gcr.io/google-containers/addon-resizer:1.8.9
functional_test.go:234: (dbg) Done: docker pull gcr.io/google-containers/addon-resizer:1.8.9: (1.65832892s)
functional_test.go:239: (dbg) Run:  docker tag gcr.io/google-containers/addon-resizer:1.8.9 gcr.io/google-containers/addon-resizer:functional-781779
functional_test.go:244: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
functional_test.go:244: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 image load --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr: (3.150033237s)
functional_test.go:447: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls
functional_test.go:442: expected "gcr.io/google-containers/addon-resizer:functional-781779" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (5.09s)

TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.67s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveToFile
functional_test.go:379: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image save gcr.io/google-containers/addon-resizer:functional-781779 /home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar --alsologtostderr
functional_test.go:385: expected "/home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar" to exist after `image save`, but doesn't exist
--- FAIL: TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.67s)
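
Here `image save` is not reported as failing, yet the tarball never appears on disk. A quick way to check the save path by hand (a sketch; /tmp is a stand-in for the workspace path the test used) is to save to a scratch location and list the archive, since the output should be a Docker-archive tar whose contents `tar tf` can enumerate:

    out/minikube-linux-arm64 -p functional-781779 image save \
        gcr.io/google-containers/addon-resizer:functional-781779 /tmp/addon-resizer-save.tar
    ls -l /tmp/addon-resizer-save.tar && tar tf /tmp/addon-resizer-save.tar | head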

TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.2s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadFromFile
functional_test.go:408: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image load /home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar --alsologtostderr
functional_test.go:410: loading image into minikube from file: <nil>

** stderr ** 
	I0704 01:52:46.225904 1242492 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:52:46.226674 1242492 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:46.226692 1242492 out.go:304] Setting ErrFile to fd 2...
	I0704 01:52:46.226699 1242492 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:46.227078 1242492 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:52:46.227931 1242492 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:52:46.228119 1242492 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:52:46.228684 1242492 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
	I0704 01:52:46.247803 1242492 ssh_runner.go:195] Run: systemctl --version
	I0704 01:52:46.247903 1242492 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
	I0704 01:52:46.266450 1242492 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
	I0704 01:52:46.364450 1242492 cache_images.go:286] Loading image from: /home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar
	W0704 01:52:46.364527 1242492 cache_images.go:254] Failed to load cached images for profile functional-781779. make sure the profile is running. loading images: stat /home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar: no such file or directory
	I0704 01:52:46.364545 1242492 cache_images.go:262] succeeded pushing to: 
	I0704 01:52:46.364559 1242492 cache_images.go:263] failed pushing to: functional-781779

** /stderr **
--- FAIL: TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.20s)
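
Per the stderr above, this failure is downstream of ImageSaveToFile: the load stats the tarball the previous test never wrote ("stat ...: no such file or directory"). Guarding the load on the artifact's existence separates the two failures (a sketch using the same path the test used):

    # only attempt the load when the save artifact actually exists
    tar=/home/jenkins/workspace/Docker_Linux_containerd_arm64/addon-resizer-save.tar
    test -f "$tar" && out/minikube-linux-arm64 -p functional-781779 image load "$tar" --alsologtostderr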

TestStartStop/group/old-k8s-version/serial/SecondStart (373.14s)

=== RUN   TestStartStop/group/old-k8s-version/serial/SecondStart
start_stop_delete_test.go:256: (dbg) Run:  out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.20.0
E0704 02:37:18.709782 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:21.270163 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:25.631764 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:37:26.390525 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:36.631326 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:49.721418 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:37:57.112254 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:38:12.321331 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.326751 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.337080 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.357427 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.397753 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.478147 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.638560 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:12.959154 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:13.599356 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:14.880092 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:17.440662 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:20.116940 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:38:22.560971 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:32.802035 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:38:38.072473 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:38:41.541372 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.546692 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.556992 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.577234 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.617568 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.698024 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:41.858326 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:42.178725 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:42.819107 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:44.099317 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:46.659589 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:48.205204 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:38:51.779746 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:38:53.282810 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:39:02.020251 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:39:11.641634 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:39:15.887944 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:39:22.500696 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:39:34.243563 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:39:41.788786 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:39:59.993899 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:40:03.461570 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:40:09.472276 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:40:36.193203 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 02:40:36.274459 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
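
The E-lines above come from the test binary's client-go certificate-rotation watcher, which is still tracking client certs for profiles (enable-default-cni-104147, flannel-104147, bridge-104147, and others) that earlier tests already deleted; they are noise relative to this test's failure. Filtering them out makes the test's own output readable (a sketch; secondstart.log is a hypothetical capture of this section):

    # drop the cert-rotation noise so only this test's lines remain
    grep -v 'cert_rotation.go:168' secondstart.log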
start_stop_delete_test.go:256: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.20.0: exit status 102 (6m10.058811529s)

-- stdout --
	* [old-k8s-version-610521] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Kubernetes 1.30.2 is now available. If you would like to upgrade, specify: --kubernetes-version=v1.30.2
	* Using the docker driver based on existing profile
	* Starting "old-k8s-version-610521" primary control-plane node in "old-k8s-version-610521" cluster
	* Pulling base image v0.0.44-1719972989-19184 ...
	* Restarting existing docker container for "old-k8s-version-610521" ...
	* Preparing Kubernetes v1.20.0 on containerd 1.7.18 ...
	* Verifying Kubernetes components...
	  - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	  - Using image fake.domain/registry.k8s.io/echoserver:1.4
	  - Using image docker.io/kubernetesui/dashboard:v2.7.0
	  - Using image registry.k8s.io/echoserver:1.4
	* Some dashboard features require the metrics-server addon. To enable all features please run:
	
		minikube -p old-k8s-version-610521 addons enable metrics-server
	
	* Enabled addons: storage-provisioner, metrics-server, dashboard, default-storageclass
	
	

-- /stdout --
** stderr ** 
	I0704 02:37:17.878373 1461358 out.go:291] Setting OutFile to fd 1 ...
	I0704 02:37:17.878496 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:37:17.878513 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:37:17.878531 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:37:17.878768 1461358 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 02:37:17.879224 1461358 out.go:298] Setting JSON to false
	I0704 02:37:17.880508 1461358 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":29988,"bootTime":1720030650,"procs":241,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 02:37:17.880582 1461358 start.go:139] virtualization:  
	I0704 02:37:17.883304 1461358 out.go:177] * [old-k8s-version-610521] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 02:37:17.885150 1461358 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 02:37:17.885335 1461358 notify.go:220] Checking for updates...
	I0704 02:37:17.889048 1461358 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 02:37:17.890994 1461358 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:37:17.892839 1461358 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 02:37:17.894471 1461358 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 02:37:17.896097 1461358 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 02:37:17.898338 1461358 config.go:182] Loaded profile config "old-k8s-version-610521": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.20.0
	I0704 02:37:17.900716 1461358 out.go:177] * Kubernetes 1.30.2 is now available. If you would like to upgrade, specify: --kubernetes-version=v1.30.2
	I0704 02:37:17.902430 1461358 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 02:37:17.923805 1461358 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 02:37:17.923928 1461358 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:37:17.990723 1461358 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:2 ContainersRunning:1 ContainersPaused:0 ContainersStopped:1 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:39 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 02:37:17.980215484 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:37:17.990830 1461358 docker.go:295] overlay module found
	I0704 02:37:17.993128 1461358 out.go:177] * Using the docker driver based on existing profile
	I0704 02:37:17.994848 1461358 start.go:297] selected driver: docker
	I0704 02:37:17.994872 1461358 start.go:901] validating driver "docker" against &{Name:old-k8s-version-610521 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-610521 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:37:17.995004 1461358 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 02:37:17.995734 1461358 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:37:18.068444 1461358 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:2 ContainersRunning:1 ContainersPaused:0 ContainersStopped:1 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:39 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 02:37:18.05192558 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:37:18.068806 1461358 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 02:37:18.068841 1461358 cni.go:84] Creating CNI manager for ""
	I0704 02:37:18.068851 1461358 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 02:37:18.068898 1461358 start.go:340] cluster config:
	{Name:old-k8s-version-610521 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-610521 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:37:18.070966 1461358 out.go:177] * Starting "old-k8s-version-610521" primary control-plane node in "old-k8s-version-610521" cluster
	I0704 02:37:18.072653 1461358 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 02:37:18.074849 1461358 out.go:177] * Pulling base image v0.0.44-1719972989-19184 ...
	I0704 02:37:18.076962 1461358 preload.go:132] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0704 02:37:18.077028 1461358 preload.go:147] Found local preload: /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0704 02:37:18.077047 1461358 cache.go:56] Caching tarball of preloaded images
	I0704 02:37:18.077054 1461358 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 02:37:18.077131 1461358 preload.go:173] Found /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0704 02:37:18.077141 1461358 cache.go:59] Finished verifying existence of preloaded tar for v1.20.0 on containerd
	I0704 02:37:18.077268 1461358 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/config.json ...
	I0704 02:37:18.095527 1461358 image.go:83] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon, skipping pull
	I0704 02:37:18.095566 1461358 cache.go:144] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in daemon, skipping load
	I0704 02:37:18.095592 1461358 cache.go:194] Successfully downloaded all kic artifacts
	I0704 02:37:18.095627 1461358 start.go:360] acquireMachinesLock for old-k8s-version-610521: {Name:mk46072d75923ee064aa7d233083e66bfb2141bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0704 02:37:18.095712 1461358 start.go:364] duration metric: took 56.072µs to acquireMachinesLock for "old-k8s-version-610521"
	I0704 02:37:18.095739 1461358 start.go:96] Skipping create...Using existing machine configuration
	I0704 02:37:18.095750 1461358 fix.go:54] fixHost starting: 
	I0704 02:37:18.096069 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:18.112855 1461358 fix.go:112] recreateIfNeeded on old-k8s-version-610521: state=Stopped err=<nil>
	W0704 02:37:18.112889 1461358 fix.go:138] unexpected machine state, will restart: <nil>
	I0704 02:37:18.115280 1461358 out.go:177] * Restarting existing docker container for "old-k8s-version-610521" ...
	I0704 02:37:18.117267 1461358 cli_runner.go:164] Run: docker start old-k8s-version-610521
	I0704 02:37:18.442486 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:18.462672 1461358 kic.go:430] container "old-k8s-version-610521" state is running.
	I0704 02:37:18.463230 1461358 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-610521
	I0704 02:37:18.489254 1461358 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/config.json ...
	I0704 02:37:18.489922 1461358 machine.go:94] provisionDockerMachine start ...
	I0704 02:37:18.490027 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:18.512077 1461358 main.go:141] libmachine: Using SSH client type: native
	I0704 02:37:18.512712 1461358 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34276 <nil> <nil>}
	I0704 02:37:18.512733 1461358 main.go:141] libmachine: About to run SSH command:
	hostname
	I0704 02:37:18.513359 1461358 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:38254->127.0.0.1:34276: read: connection reset by peer
	I0704 02:37:21.656393 1461358 main.go:141] libmachine: SSH cmd err, output: <nil>: old-k8s-version-610521
	
	I0704 02:37:21.656418 1461358 ubuntu.go:169] provisioning hostname "old-k8s-version-610521"
	I0704 02:37:21.656484 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:21.675783 1461358 main.go:141] libmachine: Using SSH client type: native
	I0704 02:37:21.676049 1461358 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34276 <nil> <nil>}
	I0704 02:37:21.676065 1461358 main.go:141] libmachine: About to run SSH command:
	sudo hostname old-k8s-version-610521 && echo "old-k8s-version-610521" | sudo tee /etc/hostname
	I0704 02:37:21.828443 1461358 main.go:141] libmachine: SSH cmd err, output: <nil>: old-k8s-version-610521
	
	I0704 02:37:21.828519 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:21.845296 1461358 main.go:141] libmachine: Using SSH client type: native
	I0704 02:37:21.845557 1461358 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34276 <nil> <nil>}
	I0704 02:37:21.845580 1461358 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sold-k8s-version-610521' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 old-k8s-version-610521/g' /etc/hosts;
				else 
					echo '127.0.1.1 old-k8s-version-610521' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0704 02:37:21.991988 1461358 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0704 02:37:21.992017 1461358 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18859-1190282/.minikube CaCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18859-1190282/.minikube}
	I0704 02:37:21.992038 1461358 ubuntu.go:177] setting up certificates
	I0704 02:37:21.992047 1461358 provision.go:84] configureAuth start
	I0704 02:37:21.992112 1461358 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-610521
	I0704 02:37:22.016078 1461358 provision.go:143] copyHostCerts
	I0704 02:37:22.016161 1461358 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem, removing ...
	I0704 02:37:22.016186 1461358 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem
	I0704 02:37:22.016282 1461358 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem (1078 bytes)
	I0704 02:37:22.016439 1461358 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem, removing ...
	I0704 02:37:22.016452 1461358 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem
	I0704 02:37:22.016487 1461358 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem (1123 bytes)
	I0704 02:37:22.016568 1461358 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem, removing ...
	I0704 02:37:22.016578 1461358 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem
	I0704 02:37:22.016606 1461358 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem (1675 bytes)
	I0704 02:37:22.016672 1461358 provision.go:117] generating server cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem org=jenkins.old-k8s-version-610521 san=[127.0.0.1 192.168.76.2 localhost minikube old-k8s-version-610521]
	I0704 02:37:22.888440 1461358 provision.go:177] copyRemoteCerts
	I0704 02:37:22.888545 1461358 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0704 02:37:22.888604 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:22.905415 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:23.008413 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem --> /etc/docker/server.pem (1233 bytes)
	I0704 02:37:23.034507 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0704 02:37:23.060278 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0704 02:37:23.086387 1461358 provision.go:87] duration metric: took 1.094323994s to configureAuth
	I0704 02:37:23.086414 1461358 ubuntu.go:193] setting minikube options for container-runtime
	I0704 02:37:23.086646 1461358 config.go:182] Loaded profile config "old-k8s-version-610521": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.20.0
	I0704 02:37:23.086665 1461358 machine.go:97] duration metric: took 4.596711289s to provisionDockerMachine
	I0704 02:37:23.086675 1461358 start.go:293] postStartSetup for "old-k8s-version-610521" (driver="docker")
	I0704 02:37:23.086691 1461358 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0704 02:37:23.086752 1461358 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0704 02:37:23.086797 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:23.105179 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:23.208976 1461358 ssh_runner.go:195] Run: cat /etc/os-release
	I0704 02:37:23.212515 1461358 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0704 02:37:23.212553 1461358 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0704 02:37:23.212589 1461358 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0704 02:37:23.212604 1461358 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0704 02:37:23.212615 1461358 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/addons for local assets ...
	I0704 02:37:23.212686 1461358 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/files for local assets ...
	I0704 02:37:23.212778 1461358 filesync.go:149] local asset: /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem -> 11956882.pem in /etc/ssl/certs
	I0704 02:37:23.212891 1461358 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0704 02:37:23.221538 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem --> /etc/ssl/certs/11956882.pem (1708 bytes)
	I0704 02:37:23.250072 1461358 start.go:296] duration metric: took 163.375709ms for postStartSetup
	I0704 02:37:23.250153 1461358 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 02:37:23.250197 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:23.267059 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:23.360298 1461358 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0704 02:37:23.364774 1461358 fix.go:56] duration metric: took 5.269016539s for fixHost
	I0704 02:37:23.364800 1461358 start.go:83] releasing machines lock for "old-k8s-version-610521", held for 5.269074835s
	I0704 02:37:23.364896 1461358 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-610521
	I0704 02:37:23.384439 1461358 ssh_runner.go:195] Run: cat /version.json
	I0704 02:37:23.384506 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:23.384748 1461358 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0704 02:37:23.384793 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:23.403106 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:23.409205 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:23.498976 1461358 ssh_runner.go:195] Run: systemctl --version
	I0704 02:37:23.638995 1461358 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0704 02:37:23.643348 1461358 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0704 02:37:23.661296 1461358 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0704 02:37:23.661396 1461358 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0704 02:37:23.670329 1461358 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0704 02:37:23.670405 1461358 start.go:495] detecting cgroup driver to use...
	I0704 02:37:23.670453 1461358 detect.go:196] detected "cgroupfs" cgroup driver on host os
	I0704 02:37:23.670531 1461358 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0704 02:37:23.684451 1461358 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0704 02:37:23.696020 1461358 docker.go:217] disabling cri-docker service (if available) ...
	I0704 02:37:23.696135 1461358 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0704 02:37:23.709091 1461358 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0704 02:37:23.720911 1461358 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0704 02:37:23.809800 1461358 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0704 02:37:23.900942 1461358 docker.go:233] disabling docker service ...
	I0704 02:37:23.901008 1461358 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0704 02:37:23.914567 1461358 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0704 02:37:23.926562 1461358 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0704 02:37:24.018584 1461358 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0704 02:37:24.138226 1461358 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0704 02:37:24.153028 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0704 02:37:24.172300 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.2"|' /etc/containerd/config.toml"
	I0704 02:37:24.184330 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0704 02:37:24.195988 1461358 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0704 02:37:24.196076 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0704 02:37:24.207175 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 02:37:24.218801 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0704 02:37:24.230399 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 02:37:24.244281 1461358 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0704 02:37:24.254568 1461358 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0704 02:37:24.266538 1461358 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0704 02:37:24.276843 1461358 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0704 02:37:24.286359 1461358 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:37:24.380872 1461358 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0704 02:37:24.562589 1461358 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0704 02:37:24.562689 1461358 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0704 02:37:24.566794 1461358 start.go:563] Will wait 60s for crictl version
	I0704 02:37:24.566888 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:37:24.570942 1461358 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0704 02:37:24.609898 1461358 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.18
	RuntimeApiVersion:  v1
	I0704 02:37:24.610025 1461358 ssh_runner.go:195] Run: containerd --version
	I0704 02:37:24.634959 1461358 ssh_runner.go:195] Run: containerd --version
	I0704 02:37:24.663374 1461358 out.go:177] * Preparing Kubernetes v1.20.0 on containerd 1.7.18 ...
	I0704 02:37:24.665158 1461358 cli_runner.go:164] Run: docker network inspect old-k8s-version-610521 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 02:37:24.685834 1461358 ssh_runner.go:195] Run: grep 192.168.76.1	host.minikube.internal$ /etc/hosts
	I0704 02:37:24.689581 1461358 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.76.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 02:37:24.700533 1461358 kubeadm.go:877] updating cluster {Name:old-k8s-version-610521 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-610521 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0704 02:37:24.700663 1461358 preload.go:132] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0704 02:37:24.700728 1461358 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 02:37:24.751788 1461358 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 02:37:24.751813 1461358 containerd.go:534] Images already preloaded, skipping extraction
	I0704 02:37:24.751873 1461358 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 02:37:24.791586 1461358 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 02:37:24.791612 1461358 cache_images.go:84] Images are preloaded, skipping loading
	I0704 02:37:24.791621 1461358 kubeadm.go:928] updating node { 192.168.76.2 8443 v1.20.0 containerd true true} ...
	I0704 02:37:24.791752 1461358 kubeadm.go:940] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.20.0/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --container-runtime=remote --container-runtime-endpoint=unix:///run/containerd/containerd.sock --hostname-override=old-k8s-version-610521 --kubeconfig=/etc/kubernetes/kubelet.conf --network-plugin=cni --node-ip=192.168.76.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-610521 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0704 02:37:24.791827 1461358 ssh_runner.go:195] Run: sudo crictl info
	I0704 02:37:24.833098 1461358 cni.go:84] Creating CNI manager for ""
	I0704 02:37:24.833123 1461358 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 02:37:24.833132 1461358 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0704 02:37:24.833152 1461358 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.76.2 APIServerPort:8443 KubernetesVersion:v1.20.0 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:old-k8s-version-610521 NodeName:old-k8s-version-610521 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.76.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.76.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:false}
	I0704 02:37:24.833274 1461358 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.76.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: /run/containerd/containerd.sock
	  name: "old-k8s-version-610521"
	  kubeletExtraArgs:
	    node-ip: 192.168.76.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.76.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	dns:
	  type: CoreDNS
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.20.0
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
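
The block above is the kubeadm config that minikube renders from the options logged at 02:37:24.833152: an InitConfiguration, a ClusterConfiguration, a KubeletConfiguration, and a KubeProxyConfiguration joined by "---". It is written to /var/tmp/minikube/kubeadm.yaml.new (the 2125-byte scp below) and later compared against the copy already on the node to decide whether the control plane must be reconfigured. A minimal sketch of that comparison, reusing the exact paths from this log:

	out/minikube-linux-arm64 -p old-k8s-version-610521 ssh \
	  "sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new" \
	  && echo "rendered config matches the file on the node"

diff exits 0 on identical files, which is the signal behind "The running cluster does not require reconfiguration" at 02:37:25.642 further down.
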
	
	I0704 02:37:24.833342 1461358 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.20.0
	I0704 02:37:24.843242 1461358 binaries.go:44] Found k8s binaries, skipping transfer
	I0704 02:37:24.843317 1461358 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0704 02:37:24.852480 1461358 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (442 bytes)
	I0704 02:37:24.871233 1461358 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0704 02:37:24.890141 1461358 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2125 bytes)
	I0704 02:37:24.909546 1461358 ssh_runner.go:195] Run: grep 192.168.76.2	control-plane.minikube.internal$ /etc/hosts
	I0704 02:37:24.913377 1461358 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.76.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 02:37:24.924559 1461358 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:37:25.012506 1461358 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 02:37:25.031027 1461358 certs.go:68] Setting up /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521 for IP: 192.168.76.2
	I0704 02:37:25.031056 1461358 certs.go:194] generating shared ca certs ...
	I0704 02:37:25.031074 1461358 certs.go:226] acquiring lock for ca certs: {Name:mk4f0dbc18506f7ee4fcbc10f124348dd208ffc0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:37:25.031246 1461358 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key
	I0704 02:37:25.031310 1461358 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key
	I0704 02:37:25.031326 1461358 certs.go:256] generating profile certs ...
	I0704 02:37:25.031431 1461358 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.key
	I0704 02:37:25.031539 1461358 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/apiserver.key.e33ede4c
	I0704 02:37:25.031593 1461358 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/proxy-client.key
	I0704 02:37:25.031723 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688.pem (1338 bytes)
	W0704 02:37:25.031754 1461358 certs.go:480] ignoring /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688_empty.pem, impossibly tiny 0 bytes
	I0704 02:37:25.031770 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem (1679 bytes)
	I0704 02:37:25.031797 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem (1078 bytes)
	I0704 02:37:25.031826 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem (1123 bytes)
	I0704 02:37:25.031852 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem (1675 bytes)
	I0704 02:37:25.031898 1461358 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem (1708 bytes)
	I0704 02:37:25.032577 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0704 02:37:25.079937 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1671 bytes)
	I0704 02:37:25.110640 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0704 02:37:25.147958 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0704 02:37:25.181679 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0704 02:37:25.218727 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0704 02:37:25.253252 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0704 02:37:25.281244 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0704 02:37:25.308332 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688.pem --> /usr/share/ca-certificates/1195688.pem (1338 bytes)
	I0704 02:37:25.333052 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem --> /usr/share/ca-certificates/11956882.pem (1708 bytes)
	I0704 02:37:25.358157 1461358 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0704 02:37:25.385838 1461358 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0704 02:37:25.404236 1461358 ssh_runner.go:195] Run: openssl version
	I0704 02:37:25.410003 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/11956882.pem && ln -fs /usr/share/ca-certificates/11956882.pem /etc/ssl/certs/11956882.pem"
	I0704 02:37:25.419519 1461358 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/11956882.pem
	I0704 02:37:25.423011 1461358 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Jul  4 01:49 /usr/share/ca-certificates/11956882.pem
	I0704 02:37:25.423074 1461358 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/11956882.pem
	I0704 02:37:25.430008 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/11956882.pem /etc/ssl/certs/3ec20f2e.0"
	I0704 02:37:25.439310 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0704 02:37:25.449193 1461358 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:37:25.452917 1461358 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Jul  4 01:08 /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:37:25.452987 1461358 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:37:25.459890 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0704 02:37:25.469554 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1195688.pem && ln -fs /usr/share/ca-certificates/1195688.pem /etc/ssl/certs/1195688.pem"
	I0704 02:37:25.479143 1461358 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1195688.pem
	I0704 02:37:25.482903 1461358 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Jul  4 01:49 /usr/share/ca-certificates/1195688.pem
	I0704 02:37:25.482972 1461358 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1195688.pem
	I0704 02:37:25.490137 1461358 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1195688.pem /etc/ssl/certs/51391683.0"
	I0704 02:37:25.499449 1461358 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0704 02:37:25.503200 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0704 02:37:25.510229 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0704 02:37:25.517626 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0704 02:37:25.524824 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0704 02:37:25.532497 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0704 02:37:25.540075 1461358 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
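
The six openssl runs above are the pre-restart certificate freshness check: -checkend 86400 exits 0 if the certificate is still valid 86400 seconds (24 hours) from now and non-zero if it would expire within that window, so a failing run would flag the cert for regeneration. The same test can be reproduced by hand against any of the paths listed, for example:

	out/minikube-linux-arm64 -p old-k8s-version-610521 ssh \
	  "sudo openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400" \
	  && echo "etcd server cert is valid for at least another 24h"
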
	I0704 02:37:25.547190 1461358 kubeadm.go:391] StartCluster: {Name:old-k8s-version-610521 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-610521 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:37:25.547288 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0704 02:37:25.547374 1461358 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0704 02:37:25.596977 1461358 cri.go:89] found id: "71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:37:25.597039 1461358 cri.go:89] found id: "ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:37:25.597055 1461358 cri.go:89] found id: "6fc18d433da5d8f66a062bca5cb9c70a4ca699e2c754c143961fd3420a10260b"
	I0704 02:37:25.597059 1461358 cri.go:89] found id: "e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:37:25.597062 1461358 cri.go:89] found id: "1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:37:25.597066 1461358 cri.go:89] found id: "186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:37:25.597069 1461358 cri.go:89] found id: "c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:37:25.597072 1461358 cri.go:89] found id: "c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:37:25.597078 1461358 cri.go:89] found id: ""
	I0704 02:37:25.597134 1461358 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0704 02:37:25.610238 1461358 cri.go:116] JSON = null
	W0704 02:37:25.610290 1461358 kubeadm.go:398] unpause failed: list paused: list returned 0 containers, but ps returned 8
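
The warning above comes from cross-checking two views of the runtime before the restart: crictl (02:37:25.547374) reports eight kube-system containers, while runc list under /run/containerd/runc/k8s.io returns JSON null, i.e. no paused containers, so the unpause step is logged as failed and skipped. Both probes, copied verbatim from this log, can be rerun directly:

	out/minikube-linux-arm64 -p old-k8s-version-610521 ssh \
	  "sudo crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system | wc -l"
	out/minikube-linux-arm64 -p old-k8s-version-610521 ssh \
	  "sudo runc --root /run/containerd/runc/k8s.io list -f json"
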
	I0704 02:37:25.610355 1461358 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	W0704 02:37:25.619998 1461358 kubeadm.go:404] apiserver tunnel failed: apiserver port not set
	I0704 02:37:25.620022 1461358 kubeadm.go:407] found existing configuration files, will attempt cluster restart
	I0704 02:37:25.620037 1461358 kubeadm.go:587] restartPrimaryControlPlane start ...
	I0704 02:37:25.620107 1461358 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0704 02:37:25.629720 1461358 kubeadm.go:129] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0704 02:37:25.630471 1461358 kubeconfig.go:47] verify endpoint returned: get endpoint: "old-k8s-version-610521" does not appear in /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:37:25.630810 1461358 kubeconfig.go:62] /home/jenkins/minikube-integration/18859-1190282/kubeconfig needs updating (will repair): [kubeconfig missing "old-k8s-version-610521" cluster setting kubeconfig missing "old-k8s-version-610521" context setting]
	I0704 02:37:25.631645 1461358 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:37:25.633305 1461358 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0704 02:37:25.642728 1461358 kubeadm.go:624] The running cluster does not require reconfiguration: 192.168.76.2
	I0704 02:37:25.642811 1461358 kubeadm.go:591] duration metric: took 22.76775ms to restartPrimaryControlPlane
	I0704 02:37:25.642827 1461358 kubeadm.go:393] duration metric: took 95.647051ms to StartCluster
	I0704 02:37:25.642844 1461358 settings.go:142] acquiring lock: {Name:mk6d49b718ddc65478a80e50434df6064c31eee4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:37:25.642917 1461358 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:37:25.643901 1461358 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:37:25.644111 1461358 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 02:37:25.644451 1461358 config.go:182] Loaded profile config "old-k8s-version-610521": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.20.0
	I0704 02:37:25.644494 1461358 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:true default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0704 02:37:25.644574 1461358 addons.go:69] Setting storage-provisioner=true in profile "old-k8s-version-610521"
	I0704 02:37:25.644598 1461358 addons.go:234] Setting addon storage-provisioner=true in "old-k8s-version-610521"
	W0704 02:37:25.644604 1461358 addons.go:243] addon storage-provisioner should already be in state true
	I0704 02:37:25.644756 1461358 host.go:66] Checking if "old-k8s-version-610521" exists ...
	I0704 02:37:25.644704 1461358 addons.go:69] Setting default-storageclass=true in profile "old-k8s-version-610521"
	I0704 02:37:25.644843 1461358 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "old-k8s-version-610521"
	I0704 02:37:25.645210 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:25.645282 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:25.644709 1461358 addons.go:69] Setting dashboard=true in profile "old-k8s-version-610521"
	I0704 02:37:25.645876 1461358 addons.go:234] Setting addon dashboard=true in "old-k8s-version-610521"
	W0704 02:37:25.645892 1461358 addons.go:243] addon dashboard should already be in state true
	I0704 02:37:25.645917 1461358 host.go:66] Checking if "old-k8s-version-610521" exists ...
	I0704 02:37:25.646325 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:25.644719 1461358 addons.go:69] Setting metrics-server=true in profile "old-k8s-version-610521"
	I0704 02:37:25.646449 1461358 addons.go:234] Setting addon metrics-server=true in "old-k8s-version-610521"
	W0704 02:37:25.646461 1461358 addons.go:243] addon metrics-server should already be in state true
	I0704 02:37:25.646536 1461358 host.go:66] Checking if "old-k8s-version-610521" exists ...
	I0704 02:37:25.646929 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:25.649971 1461358 out.go:177] * Verifying Kubernetes components...
	I0704 02:37:25.651873 1461358 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:37:25.697780 1461358 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0704 02:37:25.698549 1461358 addons.go:234] Setting addon default-storageclass=true in "old-k8s-version-610521"
	W0704 02:37:25.698565 1461358 addons.go:243] addon default-storageclass should already be in state true
	I0704 02:37:25.698592 1461358 host.go:66] Checking if "old-k8s-version-610521" exists ...
	I0704 02:37:25.699030 1461358 cli_runner.go:164] Run: docker container inspect old-k8s-version-610521 --format={{.State.Status}}
	I0704 02:37:25.699935 1461358 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:25.699951 1461358 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0704 02:37:25.700011 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:25.718468 1461358 out.go:177]   - Using image fake.domain/registry.k8s.io/echoserver:1.4
	I0704 02:37:25.720384 1461358 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0704 02:37:25.720407 1461358 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0704 02:37:25.720490 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:25.723662 1461358 out.go:177]   - Using image docker.io/kubernetesui/dashboard:v2.7.0
	I0704 02:37:25.728764 1461358 out.go:177]   - Using image registry.k8s.io/echoserver:1.4
	I0704 02:37:25.730615 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-ns.yaml
	I0704 02:37:25.730636 1461358 ssh_runner.go:362] scp dashboard/dashboard-ns.yaml --> /etc/kubernetes/addons/dashboard-ns.yaml (759 bytes)
	I0704 02:37:25.730703 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:25.748681 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:25.776416 1461358 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:25.776437 1461358 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0704 02:37:25.776504 1461358 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-610521
	I0704 02:37:25.784087 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:25.827956 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:25.828118 1461358 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34276 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/old-k8s-version-610521/id_rsa Username:docker}
	I0704 02:37:25.852503 1461358 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 02:37:25.879232 1461358 node_ready.go:35] waiting up to 6m0s for node "old-k8s-version-610521" to be "Ready" ...
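
From this point the flow forks: addon manifests are applied on the node through the bundled kubectl while node_ready polls the apiserver from the host for up to 6m0s. A roughly equivalent host-side wait (a sketch; the harness polls the API directly rather than shelling out), using the kubeconfig context repaired at 02:37:25.630:

	kubectl --context old-k8s-version-610521 wait --for=condition=Ready \
	  node/old-k8s-version-610521 --timeout=6m0s
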
	I0704 02:37:25.913422 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:25.914606 1461358 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0704 02:37:25.914627 1461358 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1825 bytes)
	I0704 02:37:25.937405 1461358 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0704 02:37:25.937434 1461358 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0704 02:37:25.972998 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:25.988467 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrole.yaml
	I0704 02:37:25.988494 1461358 ssh_runner.go:362] scp dashboard/dashboard-clusterrole.yaml --> /etc/kubernetes/addons/dashboard-clusterrole.yaml (1001 bytes)
	I0704 02:37:25.993801 1461358 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 02:37:25.993825 1461358 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0704 02:37:26.021737 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 02:37:26.048264 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml
	I0704 02:37:26.048295 1461358 ssh_runner.go:362] scp dashboard/dashboard-clusterrolebinding.yaml --> /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml (1018 bytes)
	W0704 02:37:26.090714 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.090747 1461358 retry.go:31] will retry after 135.796473ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
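
Every apply attempt from here until the apiserver finishes restarting fails the same way: the bundled kubectl on the node dials localhost:8443, gets connection refused, and retry.go re-queues the command with a growing, jittered delay (roughly 130ms up to a few seconds in this run). A minimal shell sketch of that loop, assuming the same manifest path:

	delay=1
	until out/minikube-linux-arm64 -p old-k8s-version-610521 ssh \
	  "sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml"; do
	  sleep "$delay"
	  delay=$(( delay < 16 ? delay * 2 : 16 ))  # capped exponential backoff
	done
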
	I0704 02:37:26.130757 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-configmap.yaml
	I0704 02:37:26.130782 1461358 ssh_runner.go:362] scp dashboard/dashboard-configmap.yaml --> /etc/kubernetes/addons/dashboard-configmap.yaml (837 bytes)
	I0704 02:37:26.170491 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-dp.yaml
	I0704 02:37:26.170516 1461358 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/dashboard-dp.yaml (4201 bytes)
	W0704 02:37:26.178522 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.178554 1461358 retry.go:31] will retry after 250.888747ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:26.178643 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.178681 1461358 retry.go:31] will retry after 132.075547ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.191470 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-role.yaml
	I0704 02:37:26.191596 1461358 ssh_runner.go:362] scp dashboard/dashboard-role.yaml --> /etc/kubernetes/addons/dashboard-role.yaml (1724 bytes)
	I0704 02:37:26.210134 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-rolebinding.yaml
	I0704 02:37:26.210160 1461358 ssh_runner.go:362] scp dashboard/dashboard-rolebinding.yaml --> /etc/kubernetes/addons/dashboard-rolebinding.yaml (1046 bytes)
	I0704 02:37:26.226914 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:26.229382 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-sa.yaml
	I0704 02:37:26.229449 1461358 ssh_runner.go:362] scp dashboard/dashboard-sa.yaml --> /etc/kubernetes/addons/dashboard-sa.yaml (837 bytes)
	I0704 02:37:26.263632 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-secret.yaml
	I0704 02:37:26.263708 1461358 ssh_runner.go:362] scp dashboard/dashboard-secret.yaml --> /etc/kubernetes/addons/dashboard-secret.yaml (1389 bytes)
	I0704 02:37:26.288632 1461358 addons.go:431] installing /etc/kubernetes/addons/dashboard-svc.yaml
	I0704 02:37:26.288708 1461358 ssh_runner.go:362] scp dashboard/dashboard-svc.yaml --> /etc/kubernetes/addons/dashboard-svc.yaml (1294 bytes)
	I0704 02:37:26.310955 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 02:37:26.320640 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	W0704 02:37:26.335274 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.335353 1461358 retry.go:31] will retry after 391.247689ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:26.424984 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.425018 1461358 retry.go:31] will retry after 547.479183ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:26.425069 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.425083 1461358 retry.go:31] will retry after 366.493196ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.430334 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	W0704 02:37:26.506731 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.506781 1461358 retry.go:31] will retry after 274.365706ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.727168 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:26.781654 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:26.792032 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	W0704 02:37:26.830303 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.830381 1461358 retry.go:31] will retry after 381.369317ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:26.902956 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.903050 1461358 retry.go:31] will retry after 342.915414ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:26.912950 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.912985 1461358 retry.go:31] will retry after 320.991964ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:26.973152 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:27.059448 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.059510 1461358 retry.go:31] will retry after 331.538591ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.212971 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:27.234303 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	I0704 02:37:27.246845 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	W0704 02:37:27.334674 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.334778 1461358 retry.go:31] will retry after 854.459586ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.391311 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:27.397157 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.397243 1461358 retry.go:31] will retry after 1.089903746s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:27.397336 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.397395 1461358 retry.go:31] will retry after 411.482989ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:27.464516 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.464550 1461358 retry.go:31] will retry after 780.918061ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.810010 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	I0704 02:37:27.880345 1461358 node_ready.go:53] error getting node "old-k8s-version-610521": Get "https://192.168.76.2:8443/api/v1/nodes/old-k8s-version-610521": dial tcp 192.168.76.2:8443: connect: connection refused
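
The node_ready poll above fails for the same underlying reason as the addon applies, just from the other side: the host dials 192.168.76.2:8443 directly and the apiserver is not listening yet. The same endpoint can be probed by hand from the host (in v1.20 the /healthz path is served to unauthenticated clients via the default system:public-info-viewer binding):

	curl -k https://192.168.76.2:8443/healthz
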
	W0704 02:37:27.885535 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:27.885572 1461358 retry.go:31] will retry after 652.462195ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.190060 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:28.246609 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:28.268984 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.269085 1461358 retry.go:31] will retry after 780.776313ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:28.337025 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.337058 1461358 retry.go:31] will retry after 1.86435522s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.487952 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:28.538268 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	W0704 02:37:28.572741 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.572777 1461358 retry.go:31] will retry after 1.59569632s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:28.618510 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:28.618543 1461358 retry.go:31] will retry after 1.342834685s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:29.050560 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	W0704 02:37:29.119361 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:29.119393 1461358 retry.go:31] will retry after 2.805763353s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:29.962245 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	W0704 02:37:30.059903 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:30.059938 1461358 retry.go:31] will retry after 1.711283626s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:30.169335 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:30.201838 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:30.285057 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:30.285088 1461358 retry.go:31] will retry after 1.770670119s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	W0704 02:37:30.334282 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:30.334312 1461358 retry.go:31] will retry after 1.085130257s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:30.379907 1461358 node_ready.go:53] error getting node "old-k8s-version-610521": Get "https://192.168.76.2:8443/api/v1/nodes/old-k8s-version-610521": dial tcp 192.168.76.2:8443: connect: connection refused
	I0704 02:37:31.420004 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:31.492570 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:31.492604 1461358 retry.go:31] will retry after 2.411452585s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:31.771950 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	W0704 02:37:31.845373 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:31.845407 1461358 retry.go:31] will retry after 3.328797234s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:31.926040 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	W0704 02:37:32.000318 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:32.000360 1461358 retry.go:31] will retry after 2.841475914s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:32.056598 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	W0704 02:37:32.124311 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:32.124342 1461358 retry.go:31] will retry after 4.126303821s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:32.380145 1461358 node_ready.go:53] error getting node "old-k8s-version-610521": Get "https://192.168.76.2:8443/api/v1/nodes/old-k8s-version-610521": dial tcp 192.168.76.2:8443: connect: connection refused
	I0704 02:37:33.905077 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:37:34.050594 1461358 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:34.050626 1461358 retry.go:31] will retry after 6.259201202s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8443 was refused - did you specify the right host or port?
	I0704 02:37:34.380207 1461358 node_ready.go:53] error getting node "old-k8s-version-610521": Get "https://192.168.76.2:8443/api/v1/nodes/old-k8s-version-610521": dial tcp 192.168.76.2:8443: connect: connection refused
	I0704 02:37:34.842735 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:37:35.174435 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
	I0704 02:37:36.251378 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:37:40.309963 1461358 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 02:37:41.993019 1461358 node_ready.go:49] node "old-k8s-version-610521" has status "Ready":"True"
	I0704 02:37:41.993119 1461358 node_ready.go:38] duration metric: took 16.113783771s for node "old-k8s-version-610521" to be "Ready" ...
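The node_ready.go lines above show the generic readiness poll: minikube repeatedly fetches the node object, treats "connection refused" as transient while the apiserver restarts, and returns once the NodeReady condition turns True. A minimal client-go sketch of that loop (the package name, function name, 2s interval, and 5m timeout are assumptions, not minikube's actual values):

	package nodewait

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// waitNodeReady polls the node's Ready condition, swallowing transient
	// apiserver errors (e.g. "connect: connection refused") between polls.
	func waitNodeReady(ctx context.Context, cs *kubernetes.Clientset, name string) error {
		return wait.PollUntilContextTimeout(ctx, 2*time.Second, 5*time.Minute, true,
			func(ctx context.Context) (bool, error) {
				node, err := cs.CoreV1().Nodes().Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil // assumption: keep polling on any error until timeout
				}
				for _, c := range node.Status.Conditions {
					if c.Type == corev1.NodeReady {
						return c.Status == corev1.ConditionTrue, nil
					}
				}
				return false, nil
			})
	}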
	I0704 02:37:41.993157 1461358 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods, including those with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler], to be "Ready" ...
	I0704 02:37:42.084746 1461358 pod_ready.go:78] waiting up to 6m0s for pod "coredns-74ff55c5b-r7qv7" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:42.232752 1461358 pod_ready.go:92] pod "coredns-74ff55c5b-r7qv7" in "kube-system" namespace has status "Ready":"True"
	I0704 02:37:42.232836 1461358 pod_ready.go:81] duration metric: took 147.998035ms for pod "coredns-74ff55c5b-r7qv7" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:42.232866 1461358 pod_ready.go:78] waiting up to 6m0s for pod "etcd-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:42.260601 1461358 pod_ready.go:92] pod "etcd-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"True"
	I0704 02:37:42.260671 1461358 pod_ready.go:81] duration metric: took 27.780616ms for pod "etcd-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:42.260704 1461358 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:43.747725 1461358 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (8.904952573s)
	I0704 02:37:43.922218 1461358 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (8.747675671s)
	I0704 02:37:43.922349 1461358 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (7.67093845s)
	I0704 02:37:43.922429 1461358 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply --force -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (3.612436044s)
	I0704 02:37:43.922768 1461358 addons.go:475] Verifying addon metrics-server=true in "old-k8s-version-610521"
	I0704 02:37:43.924208 1461358 out.go:177] * Some dashboard features require the metrics-server addon. To enable all features please run:
	
		minikube -p old-k8s-version-610521 addons enable metrics-server
	
	I0704 02:37:43.933438 1461358 out.go:177] * Enabled addons: storage-provisioner, metrics-server, dashboard, default-storageclass
	I0704 02:37:43.935558 1461358 addons.go:510] duration metric: took 18.291049874s to enable addons: enabled=[storage-provisioner metrics-server dashboard default-storageclass]
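The retry.go:31 "will retry after ..." lines earlier in this section are minikube's addon applier re-running kubectl apply --force with a randomized, growing delay until the apiserver on localhost:8443 accepts connections again; once it does, the queued applies complete (8.9s, 8.7s, 7.7s, 3.6s above) and the addons are reported enabled. A minimal sketch of that retry shape (the jitter and the 8s cap are illustrative assumptions, not minikube's tuning):

	package retryutil

	import (
		"fmt"
		"math/rand"
		"os/exec"
		"time"
	)

	// applyWithRetry re-runs `kubectl apply --force -f <manifest>` until it
	// succeeds, sleeping a jittered, growing interval between attempts —
	// the same shape as the "apply failed, will retry" lines above.
	func applyWithRetry(manifest string, attempts int) error {
		backoff := time.Second
		for i := 0; i < attempts; i++ {
			out, err := exec.Command("kubectl", "apply", "--force", "-f", manifest).CombinedOutput()
			if err == nil {
				return nil
			}
			d := backoff + time.Duration(rand.Int63n(int64(backoff))) // add jitter
			fmt.Printf("apply failed, will retry after %s: %v\n%s", d, err, out)
			time.Sleep(d)
			if backoff < 8*time.Second { // assumed cap
				backoff *= 2
			}
		}
		return fmt.Errorf("kubectl apply %s: exhausted %d attempts", manifest, attempts)
	}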
	I0704 02:37:44.267134 1461358 pod_ready.go:102] pod "kube-apiserver-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:46.768073 1461358 pod_ready.go:102] pod "kube-apiserver-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:48.267228 1461358 pod_ready.go:92] pod "kube-apiserver-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"True"
	I0704 02:37:48.267256 1461358 pod_ready.go:81] duration metric: took 6.006528636s for pod "kube-apiserver-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:48.267269 1461358 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:37:50.273422 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:52.273502 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:54.273557 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:56.274247 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:37:58.295850 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:00.774612 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:02.779034 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:05.274560 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:07.274785 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:09.278458 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:11.784333 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:14.274905 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:16.275126 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:18.777397 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:20.781477 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:23.274425 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:25.781186 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:28.273653 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:30.784731 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:33.273573 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:35.274622 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:37.786555 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:40.274016 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:42.275245 1461358 pod_ready.go:102] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:44.783956 1461358 pod_ready.go:92] pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"True"
	I0704 02:38:44.783981 1461358 pod_ready.go:81] duration metric: took 56.516704249s for pod "kube-controller-manager-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:38:44.783994 1461358 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-q4c98" in "kube-system" namespace to be "Ready" ...
	I0704 02:38:44.789580 1461358 pod_ready.go:92] pod "kube-proxy-q4c98" in "kube-system" namespace has status "Ready":"True"
	I0704 02:38:44.789609 1461358 pod_ready.go:81] duration metric: took 5.606731ms for pod "kube-proxy-q4c98" in "kube-system" namespace to be "Ready" ...
	I0704 02:38:44.789620 1461358 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:38:46.795423 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:48.796147 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:50.796363 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:52.796766 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:55.297437 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:57.796014 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:38:59.796711 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:02.296320 1461358 pod_ready.go:102] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:03.795554 1461358 pod_ready.go:92] pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace has status "Ready":"True"
	I0704 02:39:03.795578 1461358 pod_ready.go:81] duration metric: took 19.005948931s for pod "kube-scheduler-old-k8s-version-610521" in "kube-system" namespace to be "Ready" ...
	I0704 02:39:03.795588 1461358 pod_ready.go:78] waiting up to 6m0s for pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace to be "Ready" ...
	I0704 02:39:05.801938 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:07.802405 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:10.302370 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:12.302828 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:14.801534 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:16.801990 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:18.802250 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:21.301925 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:23.302962 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:25.800809 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:27.801866 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:29.802177 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:32.302025 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:34.302266 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:36.802243 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:39.302535 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:41.302707 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:43.801885 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:45.802665 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:48.302769 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:50.802656 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:52.846522 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:55.302336 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:57.302518 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:39:59.801457 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:01.802327 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:04.301799 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:06.303302 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:08.303935 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:10.801985 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:12.802168 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:15.301920 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:17.304235 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:19.801534 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:21.802360 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:23.802505 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:26.303742 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:28.305152 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:30.801315 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:32.802485 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:35.302850 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:37.802016 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:40.302026 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:42.302685 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:44.802297 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:47.301531 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:49.302167 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:51.302432 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:53.425384 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:55.815111 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:40:58.303176 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:00.313922 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:02.801774 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:05.309595 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:07.802344 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:09.802995 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:11.803248 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:14.303331 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:16.802901 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:19.303626 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:21.306144 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:23.801446 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:25.808977 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:28.302098 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:30.802500 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:33.302557 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:35.801970 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:38.303630 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:40.801705 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:42.801976 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:44.802359 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:46.802696 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:48.803734 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:51.302557 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:53.802058 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:55.803597 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:41:58.302165 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:00.305834 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:02.801598 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:05.301664 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:07.303473 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:09.801118 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:11.801828 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:14.341381 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:16.802112 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:19.301427 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:21.801975 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:24.302767 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:26.303619 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:28.802928 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:31.302198 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:33.302287 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:35.302917 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:37.302977 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:39.802200 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:42.301477 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:44.301588 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:46.302758 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:48.303630 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:50.802172 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:52.802864 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:54.918811 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:57.302656 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:59.803098 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:01.803611 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.805495 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.805577 1461358 pod_ready.go:81] duration metric: took 4m0.009979608s for pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace to be "Ready" ...
	E0704 02:43:03.805602 1461358 pod_ready.go:66] WaitExtra: waitPodCondition: context deadline exceeded
	I0704 02:43:03.805624 1461358 pod_ready.go:38] duration metric: took 5m21.812430295s of extra waiting for all system-critical pods and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
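Every pod_ready.go:102 line above is one poll of the pod's PodReady condition: kube-controller-manager and kube-scheduler eventually flip to True, while metrics-server-9975d5f86-hvdg7 never does (its image pull fails, per the kubelet log below), so the extra wait runs out its context deadline. A minimal client-go sketch of the underlying check (package and function names are assumptions):

	package podwait

	import (
		"context"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	// podReady reports whether the pod's PodReady condition is True — the
	// check behind the `has status "Ready":"True"/"False"` lines above.
	func podReady(ctx context.Context, cs *kubernetes.Clientset, ns, name string) (bool, error) {
		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return false, err
		}
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue, nil
			}
		}
		return false, nil
	}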
	I0704 02:43:03.805672 1461358 api_server.go:52] waiting for apiserver process to appear ...
	I0704 02:43:03.805728 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-apiserver Namespaces:[]}
	I0704 02:43:03.805834 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0704 02:43:03.864499 1461358 cri.go:89] found id: "60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:03.864518 1461358 cri.go:89] found id: "186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:03.864523 1461358 cri.go:89] found id: ""
	I0704 02:43:03.864530 1461358 logs.go:276] 2 containers: [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1]
	I0704 02:43:03.864588 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.868307 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.872242 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:etcd Namespaces:[]}
	I0704 02:43:03.872311 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0704 02:43:03.939796 1461358 cri.go:89] found id: "8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:03.939864 1461358 cri.go:89] found id: "1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:03.939900 1461358 cri.go:89] found id: ""
	I0704 02:43:03.939925 1461358 logs.go:276] 2 containers: [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39]
	I0704 02:43:03.940009 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.944079 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.948192 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:coredns Namespaces:[]}
	I0704 02:43:03.948273 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0704 02:43:03.990309 1461358 cri.go:89] found id: "58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:03.990331 1461358 cri.go:89] found id: "71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:03.990336 1461358 cri.go:89] found id: ""
	I0704 02:43:03.990344 1461358 logs.go:276] 2 containers: [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f]
	I0704 02:43:03.990402 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.995011 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.998649 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-scheduler Namespaces:[]}
	I0704 02:43:03.998723 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0704 02:43:04.050731 1461358 cri.go:89] found id: "50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:04.050754 1461358 cri.go:89] found id: "c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:04.050760 1461358 cri.go:89] found id: ""
	I0704 02:43:04.050768 1461358 logs.go:276] 2 containers: [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5]
	I0704 02:43:04.050847 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.054840 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.058996 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-proxy Namespaces:[]}
	I0704 02:43:04.059074 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0704 02:43:04.110393 1461358 cri.go:89] found id: "c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:04.110418 1461358 cri.go:89] found id: "e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:04.110423 1461358 cri.go:89] found id: ""
	I0704 02:43:04.110431 1461358 logs.go:276] 2 containers: [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8]
	I0704 02:43:04.110489 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.115777 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.119173 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-controller-manager Namespaces:[]}
	I0704 02:43:04.119258 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0704 02:43:04.162339 1461358 cri.go:89] found id: "83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:04.162362 1461358 cri.go:89] found id: "c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:04.162367 1461358 cri.go:89] found id: ""
	I0704 02:43:04.162374 1461358 logs.go:276] 2 containers: [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222]
	I0704 02:43:04.162433 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.166162 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.169642 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kindnet Namespaces:[]}
	I0704 02:43:04.169732 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0704 02:43:04.219361 1461358 cri.go:89] found id: "e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:04.219379 1461358 cri.go:89] found id: "ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:04.219384 1461358 cri.go:89] found id: ""
	I0704 02:43:04.219391 1461358 logs.go:276] 2 containers: [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f]
	I0704 02:43:04.219446 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.223078 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.226540 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kubernetes-dashboard Namespaces:[]}
	I0704 02:43:04.226613 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kubernetes-dashboard
	I0704 02:43:04.311937 1461358 cri.go:89] found id: "11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:04.311999 1461358 cri.go:89] found id: ""
	I0704 02:43:04.312014 1461358 logs.go:276] 1 containers: [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c]
	I0704 02:43:04.312084 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.315775 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:storage-provisioner Namespaces:[]}
	I0704 02:43:04.315884 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=storage-provisioner
	I0704 02:43:04.358152 1461358 cri.go:89] found id: "a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:04.358175 1461358 cri.go:89] found id: "946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:04.358180 1461358 cri.go:89] found id: ""
	I0704 02:43:04.358187 1461358 logs.go:276] 2 containers: [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a]
	I0704 02:43:04.358244 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.362325 1461358 ssh_runner.go:195] Run: which crictl
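The cri.go lines above enumerate containers one component at a time with `sudo crictl ps -a --quiet --name=<component>`; --quiet prints only container IDs, one per line, and those IDs feed the `crictl logs --tail 400` calls that follow. A minimal sketch of the same enumeration for local use (the helper and package names are assumptions):

	package crienum

	import (
		"os/exec"
		"strings"
	)

	// containerIDs runs `sudo crictl ps -a --quiet --name=<name>` and returns
	// the matching container IDs, mirroring the cri.go "found id:" lines above.
	func containerIDs(name string) ([]string, error) {
		out, err := exec.Command("sudo", "crictl", "ps", "-a", "--quiet", "--name="+name).Output()
		if err != nil {
			return nil, err
		}
		var ids []string
		for _, line := range strings.Split(strings.TrimSpace(string(out)), "\n") {
			if line != "" {
				ids = append(ids, line)
			}
		}
		return ids, nil
	}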
	I0704 02:43:04.365858 1461358 logs.go:123] Gathering logs for kindnet [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e] ...
	I0704 02:43:04.365883 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:04.406466 1461358 logs.go:123] Gathering logs for kube-proxy [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f] ...
	I0704 02:43:04.406496 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:04.446192 1461358 logs.go:123] Gathering logs for kube-controller-manager [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7] ...
	I0704 02:43:04.446222 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:04.501463 1461358 logs.go:123] Gathering logs for kube-controller-manager [c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222] ...
	I0704 02:43:04.501496 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:04.573576 1461358 logs.go:123] Gathering logs for kubernetes-dashboard [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c] ...
	I0704 02:43:04.573616 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:04.623005 1461358 logs.go:123] Gathering logs for storage-provisioner [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2] ...
	I0704 02:43:04.623033 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:04.665925 1461358 logs.go:123] Gathering logs for container status ...
	I0704 02:43:04.665953 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0704 02:43:04.715865 1461358 logs.go:123] Gathering logs for kube-apiserver [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d] ...
	I0704 02:43:04.715897 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:04.781858 1461358 logs.go:123] Gathering logs for coredns [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09] ...
	I0704 02:43:04.781896 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:04.827261 1461358 logs.go:123] Gathering logs for kube-apiserver [186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1] ...
	I0704 02:43:04.827297 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:04.884061 1461358 logs.go:123] Gathering logs for etcd [1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39] ...
	I0704 02:43:04.884099 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:04.956000 1461358 logs.go:123] Gathering logs for kubelet ...
	I0704 02:43:04.956029 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0704 02:43:05.020230 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.141075     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.020461 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.711599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.023925 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.532033     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.024354 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.584292     660 reflector.go:138] object-"kubernetes-dashboard"/"kubernetes-dashboard-token-hbf2h": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets "kubernetes-dashboard-token-hbf2h" is forbidden: User "system:node:old-k8s-version-610521" cannot list resource "secrets" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'old-k8s-version-610521' and this object
	W0704 02:43:05.026261 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:05 old-k8s-version-610521 kubelet[660]: E0704 02:38:05.812067     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.026595 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:06 old-k8s-version-610521 kubelet[660]: E0704 02:38:06.816842     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.026958 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:07 old-k8s-version-610521 kubelet[660]: E0704 02:38:07.834643     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.027151 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:09 old-k8s-version-610521 kubelet[660]: E0704 02:38:09.521695     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.027992 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:14 old-k8s-version-610521 kubelet[660]: E0704 02:38:14.843471     660 pod_workers.go:191] Error syncing pod c0dd39a9-a4fa-4097-8061-f4d356bedb93 ("storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"), skipping: failed to "StartContainer" for "storage-provisioner" with CrashLoopBackOff: "back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"
	W0704 02:43:05.028592 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:19 old-k8s-version-610521 kubelet[660]: E0704 02:38:19.869765     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.031051 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:21 old-k8s-version-610521 kubelet[660]: E0704 02:38:21.530110     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.031888 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:26 old-k8s-version-610521 kubelet[660]: E0704 02:38:26.907175     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.032076 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:35 old-k8s-version-610521 kubelet[660]: E0704 02:38:35.521777     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.032408 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:39 old-k8s-version-610521 kubelet[660]: E0704 02:38:39.521443     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.032600 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:50 old-k8s-version-610521 kubelet[660]: E0704 02:38:50.534167     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.033201 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:53 old-k8s-version-610521 kubelet[660]: E0704 02:38:53.957826     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.033530 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:56 old-k8s-version-610521 kubelet[660]: E0704 02:38:56.906952     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.033751 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:01 old-k8s-version-610521 kubelet[660]: E0704 02:39:01.521480     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.034093 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:09 old-k8s-version-610521 kubelet[660]: E0704 02:39:09.529008     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.036785 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:13 old-k8s-version-610521 kubelet[660]: E0704 02:39:13.533305     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.037129 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:22 old-k8s-version-610521 kubelet[660]: E0704 02:39:22.524726     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.037319 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:25 old-k8s-version-610521 kubelet[660]: E0704 02:39:25.521615     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.037910 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:37 old-k8s-version-610521 kubelet[660]: E0704 02:39:37.096421     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.038096 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:38 old-k8s-version-610521 kubelet[660]: E0704 02:39:38.521726     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.038425 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:46 old-k8s-version-610521 kubelet[660]: E0704 02:39:46.906948     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.038609 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:53 old-k8s-version-610521 kubelet[660]: E0704 02:39:53.521738     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.038950 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:58 old-k8s-version-610521 kubelet[660]: E0704 02:39:58.521030     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.039136 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:06 old-k8s-version-610521 kubelet[660]: E0704 02:40:06.525857     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.039470 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:12 old-k8s-version-610521 kubelet[660]: E0704 02:40:12.520985     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.039670 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:19 old-k8s-version-610521 kubelet[660]: E0704 02:40:19.521316     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.040059 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:23 old-k8s-version-610521 kubelet[660]: E0704 02:40:23.521554     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.040255 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:32 old-k8s-version-610521 kubelet[660]: E0704 02:40:32.521292     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.040595 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:38 old-k8s-version-610521 kubelet[660]: E0704 02:40:38.521059     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.043060 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:44 old-k8s-version-610521 kubelet[660]: E0704 02:40:44.529636     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.043410 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:53 old-k8s-version-610521 kubelet[660]: E0704 02:40:53.523653     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.043603 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:57 old-k8s-version-610521 kubelet[660]: E0704 02:40:57.521996     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.044200 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:07 old-k8s-version-610521 kubelet[660]: E0704 02:41:07.423946     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.044391 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:09 old-k8s-version-610521 kubelet[660]: E0704 02:41:09.521565     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.044726 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:16 old-k8s-version-610521 kubelet[660]: E0704 02:41:16.907385     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.044912 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:21 old-k8s-version-610521 kubelet[660]: E0704 02:41:21.527466     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.045240 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:31 old-k8s-version-610521 kubelet[660]: E0704 02:41:31.524981     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.045425 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:32 old-k8s-version-610521 kubelet[660]: E0704 02:41:32.521592     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.045754 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.521996     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.045938 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.522125     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046123 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.521344     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046456 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.522153     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.046772 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.521551     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046977 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.522234     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.047310 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521701     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.047501 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521892     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.047688 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:33 old-k8s-version-610521 kubelet[660]: E0704 02:42:33.521599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.048022 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.048343 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.048573 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.048764 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.049095 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
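The recurring entries above are the two failures this test tolerates while it polls: metrics-server is pointed at a bogus fake.domain registry, so it cycles between ErrImagePull and ImagePullBackOff, and dashboard-metrics-scraper sits in CrashLoopBackOff with the back-off growing 10s -> 20s -> 40s -> 1m20s -> 2m40s. A minimal sketch, assuming shell access to the old-k8s-version-610521 profile, of the same kubelet scan the harness runs:

	# Hedged sketch: reproduce the kubelet problem scan manually.
	minikube -p old-k8s-version-610521 ssh -- \
	  "sudo journalctl -u kubelet -n 400 | grep -E 'ImagePullBackOff|CrashLoopBackOff'"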
	I0704 02:43:05.049108 1461358 logs.go:123] Gathering logs for describe nodes ...
	I0704 02:43:05.049123 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0704 02:43:05.222743 1461358 logs.go:123] Gathering logs for coredns [71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f] ...
	I0704 02:43:05.222778 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:05.264679 1461358 logs.go:123] Gathering logs for kube-scheduler [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c] ...
	I0704 02:43:05.264706 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:05.303780 1461358 logs.go:123] Gathering logs for kube-scheduler [c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5] ...
	I0704 02:43:05.303810 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:05.366573 1461358 logs.go:123] Gathering logs for kube-proxy [e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8] ...
	I0704 02:43:05.366613 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:05.405377 1461358 logs.go:123] Gathering logs for kindnet [ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f] ...
	I0704 02:43:05.405407 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:05.444053 1461358 logs.go:123] Gathering logs for storage-provisioner [946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a] ...
	I0704 02:43:05.444080 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:05.481109 1461358 logs.go:123] Gathering logs for dmesg ...
	I0704 02:43:05.481135 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0704 02:43:05.499896 1461358 logs.go:123] Gathering logs for etcd [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249] ...
	I0704 02:43:05.499929 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:05.550065 1461358 logs.go:123] Gathering logs for containerd ...
	I0704 02:43:05.550096 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u containerd -n 400"
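Each "Gathering logs for <component> [<id>] ..." pair above shells into the node and tails that container's log by its full ID; host-level sources (dmesg, containerd) go through dmesg and journalctl instead. A sketch of the per-container fetch, reusing an ID taken from the log above:

	# Hedged sketch: tail the last 400 lines of one container by its full ID.
	sudo /usr/bin/crictl logs --tail 400 8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249
	# Or the containerd service itself:
	sudo journalctl -u containerd -n 400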
	I0704 02:43:05.609341 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:05.609373 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	W0704 02:43:05.609437 1461358 out.go:239] X Problems detected in kubelet:
	W0704 02:43:05.609448 1461358 out.go:239]   Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.609464 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.609474 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.609489 1461358 out.go:239]   Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.609497 1461358 out.go:239]   Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:05.609503 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:05.609509 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:43:15.610562 1461358 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 02:43:15.631593 1461358 api_server.go:72] duration metric: took 5m49.987447376s to wait for apiserver process to appear ...
	I0704 02:43:15.631618 1461358 api_server.go:88] waiting for apiserver healthz status ...
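A process check precedes the healthz wait. A sketch of the same probe, using the flags from the Run line above (-x exact match, -n newest, -f match the full command line):

	# Hedged sketch: confirm the apiserver process exists before polling healthz.
	sudo pgrep -xnf 'kube-apiserver.*minikube.*'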
	I0704 02:43:15.631652 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-apiserver Namespaces:[]}
	I0704 02:43:15.631712 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0704 02:43:15.680535 1461358 cri.go:89] found id: "60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:15.680560 1461358 cri.go:89] found id: "186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:15.680565 1461358 cri.go:89] found id: ""
	I0704 02:43:15.680572 1461358 logs.go:276] 2 containers: [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1]
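Two IDs per component are expected here: the query includes exited containers (-a), and the cluster was apparently stopped and restarted during this test, so both the old and the new instance are listed. The underlying query, verbatim:

	# Hedged sketch: list all (running and exited) kube-apiserver container IDs.
	sudo crictl ps -a --quiet --name=kube-apiserver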
	I0704 02:43:15.680629 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.684592 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.688803 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:etcd Namespaces:[]}
	I0704 02:43:15.688888 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0704 02:43:15.731139 1461358 cri.go:89] found id: "8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:15.731166 1461358 cri.go:89] found id: "1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:15.731180 1461358 cri.go:89] found id: ""
	I0704 02:43:15.731188 1461358 logs.go:276] 2 containers: [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39]
	I0704 02:43:15.731247 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.735872 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.739703 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:coredns Namespaces:[]}
	I0704 02:43:15.739775 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0704 02:43:15.830022 1461358 cri.go:89] found id: "58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:15.830045 1461358 cri.go:89] found id: "71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:15.830050 1461358 cri.go:89] found id: ""
	I0704 02:43:15.830057 1461358 logs.go:276] 2 containers: [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f]
	I0704 02:43:15.830115 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.834360 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.838317 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-scheduler Namespaces:[]}
	I0704 02:43:15.838392 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0704 02:43:15.904704 1461358 cri.go:89] found id: "50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:15.904768 1461358 cri.go:89] found id: "c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:15.904786 1461358 cri.go:89] found id: ""
	I0704 02:43:15.904811 1461358 logs.go:276] 2 containers: [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5]
	I0704 02:43:15.904910 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.910048 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.916437 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-proxy Namespaces:[]}
	I0704 02:43:15.916611 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0704 02:43:15.983932 1461358 cri.go:89] found id: "c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:15.984007 1461358 cri.go:89] found id: "e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:15.984032 1461358 cri.go:89] found id: ""
	I0704 02:43:15.984055 1461358 logs.go:276] 2 containers: [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8]
	I0704 02:43:15.984147 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.988958 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.993805 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-controller-manager Namespaces:[]}
	I0704 02:43:15.993928 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0704 02:43:16.064713 1461358 cri.go:89] found id: "83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:16.064789 1461358 cri.go:89] found id: "c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:16.064809 1461358 cri.go:89] found id: ""
	I0704 02:43:16.064834 1461358 logs.go:276] 2 containers: [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222]
	I0704 02:43:16.064924 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.069858 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.074563 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kindnet Namespaces:[]}
	I0704 02:43:16.074686 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0704 02:43:16.134085 1461358 cri.go:89] found id: "e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:16.134158 1461358 cri.go:89] found id: "ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:16.134178 1461358 cri.go:89] found id: ""
	I0704 02:43:16.134201 1461358 logs.go:276] 2 containers: [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f]
	I0704 02:43:16.134291 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.138505 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.142556 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:storage-provisioner Namespaces:[]}
	I0704 02:43:16.142679 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=storage-provisioner
	I0704 02:43:16.206973 1461358 cri.go:89] found id: "a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:16.207048 1461358 cri.go:89] found id: "946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:16.207067 1461358 cri.go:89] found id: ""
	I0704 02:43:16.207093 1461358 logs.go:276] 2 containers: [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a]
	I0704 02:43:16.207180 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.215336 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.219547 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kubernetes-dashboard Namespaces:[]}
	I0704 02:43:16.219673 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kubernetes-dashboard
	I0704 02:43:16.270945 1461358 cri.go:89] found id: "11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:16.271020 1461358 cri.go:89] found id: ""
	I0704 02:43:16.271058 1461358 logs.go:276] 1 containers: [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c]
	I0704 02:43:16.271135 1461358 ssh_runner.go:195] Run: which crictl
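kubernetes-dashboard is the only component reporting a single ID, presumably because only one instance has ever run. A sketch of the whole enumeration loop the harness effectively performs above:

	# Hedged sketch: enumerate container IDs per component, as above.
	for c in kube-apiserver etcd coredns kube-scheduler kube-proxy \
	         kube-controller-manager kindnet storage-provisioner kubernetes-dashboard; do
	  echo "== $c =="
	  sudo crictl ps -a --quiet --name="$c"
	done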
	I0704 02:43:16.275170 1461358 logs.go:123] Gathering logs for kube-apiserver [186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1] ...
	I0704 02:43:16.275276 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:16.355677 1461358 logs.go:123] Gathering logs for etcd [1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39] ...
	I0704 02:43:16.355755 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:16.415232 1461358 logs.go:123] Gathering logs for coredns [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09] ...
	I0704 02:43:16.415389 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:16.481996 1461358 logs.go:123] Gathering logs for kube-scheduler [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c] ...
	I0704 02:43:16.482071 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:16.547129 1461358 logs.go:123] Gathering logs for kube-scheduler [c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5] ...
	I0704 02:43:16.547207 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:16.606828 1461358 logs.go:123] Gathering logs for kube-proxy [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f] ...
	I0704 02:43:16.606995 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:16.673552 1461358 logs.go:123] Gathering logs for kube-controller-manager [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7] ...
	I0704 02:43:16.673619 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:16.783832 1461358 logs.go:123] Gathering logs for dmesg ...
	I0704 02:43:16.783909 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0704 02:43:16.817561 1461358 logs.go:123] Gathering logs for kindnet [ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f] ...
	I0704 02:43:16.817602 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:16.871781 1461358 logs.go:123] Gathering logs for etcd [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249] ...
	I0704 02:43:16.871853 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:16.932739 1461358 logs.go:123] Gathering logs for coredns [71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f] ...
	I0704 02:43:16.932780 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:16.990648 1461358 logs.go:123] Gathering logs for kube-proxy [e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8] ...
	I0704 02:43:16.990719 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:17.056592 1461358 logs.go:123] Gathering logs for storage-provisioner [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2] ...
	I0704 02:43:17.056668 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:17.110694 1461358 logs.go:123] Gathering logs for containerd ...
	I0704 02:43:17.110722 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u containerd -n 400"
	I0704 02:43:17.187412 1461358 logs.go:123] Gathering logs for container status ...
	I0704 02:43:17.187449 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
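The container-status gather uses a two-level fallback so it works whether or not crictl is installed: the substitution "which crictl || echo crictl" keeps the command well formed even when crictl is missing from PATH, and "|| sudo docker ps -a" covers Docker-runtime nodes. The same one-liner, runnable standalone:

	# Hedged sketch: CRI container listing with a Docker fallback.
	sudo $(which crictl || echo crictl) ps -a || sudo docker ps -a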
	I0704 02:43:17.245188 1461358 logs.go:123] Gathering logs for describe nodes ...
	I0704 02:43:17.245216 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0704 02:43:17.418440 1461358 logs.go:123] Gathering logs for kube-apiserver [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d] ...
	I0704 02:43:17.418477 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:17.488189 1461358 logs.go:123] Gathering logs for kube-controller-manager [c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222] ...
	I0704 02:43:17.488225 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:17.584036 1461358 logs.go:123] Gathering logs for kindnet [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e] ...
	I0704 02:43:17.584076 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:17.661684 1461358 logs.go:123] Gathering logs for kubernetes-dashboard [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c] ...
	I0704 02:43:17.661717 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:17.713313 1461358 logs.go:123] Gathering logs for kubelet ...
	I0704 02:43:17.713341 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0704 02:43:17.778411 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.141075     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.778619 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.711599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.784363 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.532033     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.784798 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.584292     660 reflector.go:138] object-"kubernetes-dashboard"/"kubernetes-dashboard-token-hbf2h": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets "kubernetes-dashboard-token-hbf2h" is forbidden: User "system:node:old-k8s-version-610521" cannot list resource "secrets" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'old-k8s-version-610521' and this object
	W0704 02:43:17.786715 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:05 old-k8s-version-610521 kubelet[660]: E0704 02:38:05.812067     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.787057 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:06 old-k8s-version-610521 kubelet[660]: E0704 02:38:06.816842     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.787387 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:07 old-k8s-version-610521 kubelet[660]: E0704 02:38:07.834643     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.790934 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:09 old-k8s-version-610521 kubelet[660]: E0704 02:38:09.521695     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.791791 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:14 old-k8s-version-610521 kubelet[660]: E0704 02:38:14.843471     660 pod_workers.go:191] Error syncing pod c0dd39a9-a4fa-4097-8061-f4d356bedb93 ("storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"), skipping: failed to "StartContainer" for "storage-provisioner" with CrashLoopBackOff: "back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"
	W0704 02:43:17.792387 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:19 old-k8s-version-610521 kubelet[660]: E0704 02:38:19.869765     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.794882 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:21 old-k8s-version-610521 kubelet[660]: E0704 02:38:21.530110     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.795684 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:26 old-k8s-version-610521 kubelet[660]: E0704 02:38:26.907175     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.795871 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:35 old-k8s-version-610521 kubelet[660]: E0704 02:38:35.521777     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.796248 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:39 old-k8s-version-610521 kubelet[660]: E0704 02:38:39.521443     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.796439 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:50 old-k8s-version-610521 kubelet[660]: E0704 02:38:50.534167     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.797043 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:53 old-k8s-version-610521 kubelet[660]: E0704 02:38:53.957826     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.797375 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:56 old-k8s-version-610521 kubelet[660]: E0704 02:38:56.906952     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.797563 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:01 old-k8s-version-610521 kubelet[660]: E0704 02:39:01.521480     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.801144 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:09 old-k8s-version-610521 kubelet[660]: E0704 02:39:09.529008     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.804640 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:13 old-k8s-version-610521 kubelet[660]: E0704 02:39:13.533305     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.804986 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:22 old-k8s-version-610521 kubelet[660]: E0704 02:39:22.524726     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.805175 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:25 old-k8s-version-610521 kubelet[660]: E0704 02:39:25.521615     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.805764 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:37 old-k8s-version-610521 kubelet[660]: E0704 02:39:37.096421     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.805950 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:38 old-k8s-version-610521 kubelet[660]: E0704 02:39:38.521726     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.806299 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:46 old-k8s-version-610521 kubelet[660]: E0704 02:39:46.906948     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.806486 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:53 old-k8s-version-610521 kubelet[660]: E0704 02:39:53.521738     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.806834 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:58 old-k8s-version-610521 kubelet[660]: E0704 02:39:58.521030     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.807024 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:06 old-k8s-version-610521 kubelet[660]: E0704 02:40:06.525857     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.807356 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:12 old-k8s-version-610521 kubelet[660]: E0704 02:40:12.520985     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.807553 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:19 old-k8s-version-610521 kubelet[660]: E0704 02:40:19.521316     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.807894 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:23 old-k8s-version-610521 kubelet[660]: E0704 02:40:23.521554     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.808081 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:32 old-k8s-version-610521 kubelet[660]: E0704 02:40:32.521292     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.808410 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:38 old-k8s-version-610521 kubelet[660]: E0704 02:40:38.521059     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.810894 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:44 old-k8s-version-610521 kubelet[660]: E0704 02:40:44.529636     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.811227 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:53 old-k8s-version-610521 kubelet[660]: E0704 02:40:53.523653     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.811415 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:57 old-k8s-version-610521 kubelet[660]: E0704 02:40:57.521996     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.812929 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:07 old-k8s-version-610521 kubelet[660]: E0704 02:41:07.423946     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.813137 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:09 old-k8s-version-610521 kubelet[660]: E0704 02:41:09.521565     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.813470 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:16 old-k8s-version-610521 kubelet[660]: E0704 02:41:16.907385     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.813671 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:21 old-k8s-version-610521 kubelet[660]: E0704 02:41:21.527466     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814011 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:31 old-k8s-version-610521 kubelet[660]: E0704 02:41:31.524981     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.814198 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:32 old-k8s-version-610521 kubelet[660]: E0704 02:41:32.521592     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814530 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.521996     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.814716 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.522125     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814908 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.521344     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.815237 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.522153     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.815565 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.521551     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.815765 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.522234     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.816099 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521701     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.816335 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521892     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.816533 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:33 old-k8s-version-610521 kubelet[660]: E0704 02:42:33.521599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.816863 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.817182 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.817381 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.817566 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.817907 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.818093 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:14 old-k8s-version-610521 kubelet[660]: E0704 02:43:14.521315     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.818422 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: E0704 02:43:16.521001     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:17.818434 1461358 logs.go:123] Gathering logs for storage-provisioner [946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a] ...
	I0704 02:43:17.818451 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:17.868279 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:17.868304 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	W0704 02:43:17.868349 1461358 out.go:239] X Problems detected in kubelet:
	W0704 02:43:17.868360 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.868371 1461358 out.go:239]   Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.868378 1461358 out.go:239]   Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.868384 1461358 out.go:239]   Jul 04 02:43:14 old-k8s-version-610521 kubelet[660]: E0704 02:43:14.521315     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.868394 1461358 out.go:239]   Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: E0704 02:43:16.521001     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:17.868415 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:17.868420 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:43:27.869804 1461358 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0704 02:43:27.879007 1461358 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0704 02:43:27.880655 1461358 out.go:177] 
	W0704 02:43:27.882680 1461358 out.go:239] X Exiting due to K8S_UNHEALTHY_CONTROL_PLANE: wait 6m0s for node: wait for healthy API server: controlPlane never updated to v1.20.0
	W0704 02:43:27.882719 1461358 out.go:239] * Suggestion: Control Plane could not update, try minikube delete --all --purge
	W0704 02:43:27.882761 1461358 out.go:239] * Related issue: https://github.com/kubernetes/minikube/issues/11417
	W0704 02:43:27.882767 1461358 out.go:239] * 
	W0704 02:43:27.883743 1461358 out.go:239] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0704 02:43:27.886072 1461358 out.go:177] 

                                                
                                                
** /stderr **
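The kubelet problems repeated in the stderr above reduce to two loops: metrics-server stays in ImagePullBackOff because the test deliberately rewrites its registry to the unresolvable fake.domain (see the --registries=MetricsServer=fake.domain rows in the Audit table below), and dashboard-metrics-scraper stays in CrashLoopBackOff. A minimal sketch for confirming both against this profile, assuming the cluster is still running and reusing the pod names from this run:

	kubectl --context old-k8s-version-610521 -n kube-system get events \
	  --field-selector involvedObject.name=metrics-server-9975d5f86-hvdg7
	kubectl --context old-k8s-version-610521 -n kubernetes-dashboard \
	  logs dashboard-metrics-scraper-8d5bb5db8-rxdl6 --previous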
start_stop_delete_test.go:259: failed to start minikube post-stop. args "out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.20.0": exit status 102
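Minikube's own suggestion above is the usual recovery path for K8S_UNHEALTHY_CONTROL_PLANE; a sketch of it with the same binary and the core flags from the failing invocation (note that delete --all --purge is destructive: it removes every local profile and the .minikube directory):

	out/minikube-linux-arm64 delete --all --purge
	out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 \
	  --driver=docker --container-runtime=containerd --kubernetes-version=v1.20.0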
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestStartStop/group/old-k8s-version/serial/SecondStart]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect old-k8s-version-610521
helpers_test.go:235: (dbg) docker inspect old-k8s-version-610521:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09",
	        "Created": "2024-07-04T02:34:02.401467351Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 1461552,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-07-04T02:37:18.236142428Z",
	            "FinishedAt": "2024-07-04T02:37:17.367684137Z"
	        },
	        "Image": "sha256:fe62b5a5301065dd92924d274286e0d1b2227c557eb51c213d07169631b2b3f7",
	        "ResolvConfPath": "/var/lib/docker/containers/6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09/hostname",
	        "HostsPath": "/var/lib/docker/containers/6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09/hosts",
	        "LogPath": "/var/lib/docker/containers/6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09/6b19a0ce0d7efcadeac5ade570541b9d9493c44c3e783591eb275777a1e7ac09-json.log",
	        "Name": "/old-k8s-version-610521",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "old-k8s-version-610521:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "old-k8s-version-610521",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/dbd49b5764621c044838e6a58b68d5d029e407c5468c853b19573da163d9c984-init/diff:/var/lib/docker/overlay2/04be1cfb4b9b173c47d5bff32a15bd2c62951348a7d8ba248dee1fc574bba292/diff",
	                "MergedDir": "/var/lib/docker/overlay2/dbd49b5764621c044838e6a58b68d5d029e407c5468c853b19573da163d9c984/merged",
	                "UpperDir": "/var/lib/docker/overlay2/dbd49b5764621c044838e6a58b68d5d029e407c5468c853b19573da163d9c984/diff",
	                "WorkDir": "/var/lib/docker/overlay2/dbd49b5764621c044838e6a58b68d5d029e407c5468c853b19573da163d9c984/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "old-k8s-version-610521",
	                "Source": "/var/lib/docker/volumes/old-k8s-version-610521/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "old-k8s-version-610521",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "old-k8s-version-610521",
	                "name.minikube.sigs.k8s.io": "old-k8s-version-610521",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "8434e79db3578b6085653c4592567b913864947c50666e387878d9b9a5f183a4",
	            "SandboxKey": "/var/run/docker/netns/8434e79db357",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34276"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34277"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34280"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34278"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "34279"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "old-k8s-version-610521": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.76.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:4c:02",
	                    "DriverOpts": null,
	                    "NetworkID": "044d32ec2ca8f8b4d8dead5cca2616982014cad8206ec8f00fbb74671c681d54",
	                    "EndpointID": "ea32bdc092ecdfac81189e541b66b83a4dd69190a874800466c3fa487fa50ad5",
	                    "Gateway": "192.168.76.1",
	                    "IPAddress": "192.168.76.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "old-k8s-version-610521",
	                        "6b19a0ce0d7e"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
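The inspect output shows the container itself is fine (State.Status is "running", the apiserver port 8443 is published on 127.0.0.1:34279, node IP 192.168.76.2), which localizes the failure to the control plane inside the node rather than the Docker layer. A sketch for extracting just those fields, assuming jq is installed on the host; the field paths match the JSON shown above:

	docker inspect old-k8s-version-610521 | jq '.[0] | {
	    Status: .State.Status,
	    StartedAt: .State.StartedAt,
	    IP: .NetworkSettings.Networks["old-k8s-version-610521"].IPAddress,
	    APIServer: .NetworkSettings.Ports["8443/tcp"][0]
	  }'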
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p old-k8s-version-610521 -n old-k8s-version-610521
helpers_test.go:244: <<< TestStartStop/group/old-k8s-version/serial/SecondStart FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestStartStop/group/old-k8s-version/serial/SecondStart]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p old-k8s-version-610521 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p old-k8s-version-610521 logs -n 25: (2.010608103s)
helpers_test.go:252: TestStartStop/group/old-k8s-version/serial/SecondStart logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                          Args                          |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p bridge-104147 sudo                                  | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:34 UTC |
	|         | containerd config dump                                 |                        |         |         |                     |                     |
	| ssh     | -p bridge-104147 sudo                                  | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC |                     |
	|         | systemctl status crio --all                            |                        |         |         |                     |                     |
	|         | --full --no-pager                                      |                        |         |         |                     |                     |
	| ssh     | -p bridge-104147 sudo                                  | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:34 UTC |
	|         | systemctl cat crio --no-pager                          |                        |         |         |                     |                     |
	| ssh     | -p bridge-104147 sudo find                             | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:34 UTC |
	|         | /etc/crio -type f -exec sh -c                          |                        |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                   |                        |         |         |                     |                     |
	| ssh     | -p bridge-104147 sudo crio                             | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:34 UTC |
	|         | config                                                 |                        |         |         |                     |                     |
	| delete  | -p bridge-104147                                       | bridge-104147          | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:34 UTC |
	| start   | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:34 UTC | 04 Jul 24 02:35 UTC |
	|         | --memory=2200                                          |                        |         |         |                     |                     |
	|         | --alsologtostderr                                      |                        |         |         |                     |                     |
	|         | --wait=true --preload=false                            |                        |         |         |                     |                     |
	|         | --driver=docker                                        |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                         |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2                           |                        |         |         |                     |                     |
	| addons  | enable metrics-server -p no-preload-434675             | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:35 UTC | 04 Jul 24 02:35 UTC |
	|         | --images=MetricsServer=registry.k8s.io/echoserver:1.4  |                        |         |         |                     |                     |
	|         | --registries=MetricsServer=fake.domain                 |                        |         |         |                     |                     |
	| stop    | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:35 UTC | 04 Jul 24 02:35 UTC |
	|         | --alsologtostderr -v=3                                 |                        |         |         |                     |                     |
	| addons  | enable dashboard -p no-preload-434675                  | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:35 UTC | 04 Jul 24 02:35 UTC |
	|         | --images=MetricsScraper=registry.k8s.io/echoserver:1.4 |                        |         |         |                     |                     |
	| start   | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:35 UTC | 04 Jul 24 02:40 UTC |
	|         | --memory=2200                                          |                        |         |         |                     |                     |
	|         | --alsologtostderr                                      |                        |         |         |                     |                     |
	|         | --wait=true --preload=false                            |                        |         |         |                     |                     |
	|         | --driver=docker                                        |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                         |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2                           |                        |         |         |                     |                     |
	| addons  | enable metrics-server -p old-k8s-version-610521        | old-k8s-version-610521 | jenkins | v1.33.1 | 04 Jul 24 02:37 UTC | 04 Jul 24 02:37 UTC |
	|         | --images=MetricsServer=registry.k8s.io/echoserver:1.4  |                        |         |         |                     |                     |
	|         | --registries=MetricsServer=fake.domain                 |                        |         |         |                     |                     |
	| stop    | -p old-k8s-version-610521                              | old-k8s-version-610521 | jenkins | v1.33.1 | 04 Jul 24 02:37 UTC | 04 Jul 24 02:37 UTC |
	|         | --alsologtostderr -v=3                                 |                        |         |         |                     |                     |
	| addons  | enable dashboard -p old-k8s-version-610521             | old-k8s-version-610521 | jenkins | v1.33.1 | 04 Jul 24 02:37 UTC | 04 Jul 24 02:37 UTC |
	|         | --images=MetricsScraper=registry.k8s.io/echoserver:1.4 |                        |         |         |                     |                     |
	| start   | -p old-k8s-version-610521                              | old-k8s-version-610521 | jenkins | v1.33.1 | 04 Jul 24 02:37 UTC |                     |
	|         | --memory=2200                                          |                        |         |         |                     |                     |
	|         | --alsologtostderr --wait=true                          |                        |         |         |                     |                     |
	|         | --kvm-network=default                                  |                        |         |         |                     |                     |
	|         | --kvm-qemu-uri=qemu:///system                          |                        |         |         |                     |                     |
	|         | --disable-driver-mounts                                |                        |         |         |                     |                     |
	|         | --keep-context=false                                   |                        |         |         |                     |                     |
	|         | --driver=docker                                        |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                         |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0                           |                        |         |         |                     |                     |
	| image   | no-preload-434675 image list                           | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:40 UTC |
	|         | --format=json                                          |                        |         |         |                     |                     |
	| pause   | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:40 UTC |
	|         | --alsologtostderr -v=1                                 |                        |         |         |                     |                     |
	| unpause | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:40 UTC |
	|         | --alsologtostderr -v=1                                 |                        |         |         |                     |                     |
	| delete  | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:40 UTC |
	| delete  | -p no-preload-434675                                   | no-preload-434675      | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:40 UTC |
	| start   | -p embed-certs-430955                                  | embed-certs-430955     | jenkins | v1.33.1 | 04 Jul 24 02:40 UTC | 04 Jul 24 02:42 UTC |
	|         | --memory=2200                                          |                        |         |         |                     |                     |
	|         | --alsologtostderr --wait=true                          |                        |         |         |                     |                     |
	|         | --embed-certs --driver=docker                          |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                         |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2                           |                        |         |         |                     |                     |
	| addons  | enable metrics-server -p embed-certs-430955            | embed-certs-430955     | jenkins | v1.33.1 | 04 Jul 24 02:42 UTC | 04 Jul 24 02:42 UTC |
	|         | --images=MetricsServer=registry.k8s.io/echoserver:1.4  |                        |         |         |                     |                     |
	|         | --registries=MetricsServer=fake.domain                 |                        |         |         |                     |                     |
	| stop    | -p embed-certs-430955                                  | embed-certs-430955     | jenkins | v1.33.1 | 04 Jul 24 02:42 UTC | 04 Jul 24 02:42 UTC |
	|         | --alsologtostderr -v=3                                 |                        |         |         |                     |                     |
	| addons  | enable dashboard -p embed-certs-430955                 | embed-certs-430955     | jenkins | v1.33.1 | 04 Jul 24 02:42 UTC | 04 Jul 24 02:42 UTC |
	|         | --images=MetricsScraper=registry.k8s.io/echoserver:1.4 |                        |         |         |                     |                     |
	| start   | -p embed-certs-430955                                  | embed-certs-430955     | jenkins | v1.33.1 | 04 Jul 24 02:42 UTC |                     |
	|         | --memory=2200                                          |                        |         |         |                     |                     |
	|         | --alsologtostderr --wait=true                          |                        |         |         |                     |                     |
	|         | --embed-certs --driver=docker                          |                        |         |         |                     |                     |
	|         | --container-runtime=containerd                         |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2                           |                        |         |         |                     |                     |
	|---------|--------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 02:42:47
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 02:42:47.124199 1470916 out.go:291] Setting OutFile to fd 1 ...
	I0704 02:42:47.124346 1470916 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:42:47.124357 1470916 out.go:304] Setting ErrFile to fd 2...
	I0704 02:42:47.124362 1470916 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:42:47.124600 1470916 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 02:42:47.124964 1470916 out.go:298] Setting JSON to false
	I0704 02:42:47.126056 1470916 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":30317,"bootTime":1720030650,"procs":226,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 02:42:47.126204 1470916 start.go:139] virtualization:  
	I0704 02:42:47.129074 1470916 out.go:177] * [embed-certs-430955] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 02:42:47.131174 1470916 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 02:42:47.131252 1470916 notify.go:220] Checking for updates...
	I0704 02:42:47.134768 1470916 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 02:42:47.136610 1470916 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:42:47.138583 1470916 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 02:42:47.140562 1470916 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 02:42:47.143390 1470916 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 02:42:47.147084 1470916 config.go:182] Loaded profile config "embed-certs-430955": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:42:47.147745 1470916 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 02:42:47.178432 1470916 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 02:42:47.178570 1470916 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:42:47.234326 1470916 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:2 ContainersRunning:1 ContainersPaused:0 ContainersStopped:1 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 02:42:47.224499073 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:42:47.234434 1470916 docker.go:295] overlay module found
	I0704 02:42:47.237315 1470916 out.go:177] * Using the docker driver based on existing profile
	I0704 02:42:47.239571 1470916 start.go:297] selected driver: docker
	I0704 02:42:47.239589 1470916 start.go:901] validating driver "docker" against &{Name:embed-certs-430955 KeepContext:false EmbedCerts:true MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:embed-certs-430955 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:42:47.239707 1470916 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 02:42:47.240337 1470916 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:42:47.292150 1470916 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:2 ContainersRunning:1 ContainersPaused:0 ContainersStopped:1 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 02:42:47.283096462 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:42:47.292548 1470916 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0704 02:42:47.292575 1470916 cni.go:84] Creating CNI manager for ""
	I0704 02:42:47.292583 1470916 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
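The kindnet recommendation above is driven purely by the driver/runtime pair. A minimal Go sketch of that decision, simplified for illustration and not minikube's actual cni.go logic:

	package main

	import "fmt"

	// chooseCNI mirrors the decision the cni.go lines report: the docker driver
	// with a non-docker runtime such as containerd needs an explicit CNI, and
	// kindnet is the recommended default. Simplified sketch, not minikube's code.
	func chooseCNI(driver, runtime string) string {
		if driver == "docker" && runtime != "docker" {
			return "kindnet"
		}
		return "" // runtime's built-in networking suffices
	}

	func main() {
		fmt.Println(chooseCNI("docker", "containerd")) // kindnet
	}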
	I0704 02:42:47.292624 1470916 start.go:340] cluster config:
	{Name:embed-certs-430955 KeepContext:false EmbedCerts:true MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:embed-certs-430955 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:42:47.295099 1470916 out.go:177] * Starting "embed-certs-430955" primary control-plane node in "embed-certs-430955" cluster
	I0704 02:42:47.297923 1470916 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 02:42:47.301378 1470916 out.go:177] * Pulling base image v0.0.44-1719972989-19184 ...
	I0704 02:42:47.303601 1470916 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 02:42:47.303664 1470916 preload.go:147] Found local preload: /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	I0704 02:42:47.303679 1470916 cache.go:56] Caching tarball of preloaded images
	I0704 02:42:47.303796 1470916 preload.go:173] Found /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0704 02:42:47.303812 1470916 cache.go:59] Finished verifying existence of preloaded tar for v1.30.2 on containerd
	I0704 02:42:47.303938 1470916 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/config.json ...
	I0704 02:42:47.304164 1470916 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 02:42:47.319822 1470916 image.go:83] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon, skipping pull
	I0704 02:42:47.319847 1470916 cache.go:144] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in daemon, skipping load
	I0704 02:42:47.319870 1470916 cache.go:194] Successfully downloaded all kic artifacts
	I0704 02:42:47.319913 1470916 start.go:360] acquireMachinesLock for embed-certs-430955: {Name:mk5d60f1d6a8626365e2379ef5eea6d46c6f6d64 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0704 02:42:47.319999 1470916 start.go:364] duration metric: took 54.612µs to acquireMachinesLock for "embed-certs-430955"
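The machines lock above is acquired with a 500ms retry delay and a 10m timeout. A hypothetical Go sketch of such a retry-until-deadline lock; minikube's real lock is file-based, not the in-process channel used here:

	package main

	import (
		"errors"
		"fmt"
		"time"
	)

	// acquire retries a non-blocking take on lock every delay until timeout,
	// mirroring the Delay:500ms Timeout:10m0s shown in the log line above.
	func acquire(lock chan struct{}, delay, timeout time.Duration) error {
		deadline := time.Now().Add(timeout)
		for {
			select {
			case lock <- struct{}{}:
				return nil // token placed: lock held
			default:
			}
			if time.Now().After(deadline) {
				return errors.New("timed out acquiring machines lock")
			}
			time.Sleep(delay)
		}
	}

	func main() {
		lock := make(chan struct{}, 1)
		if err := acquire(lock, 500*time.Millisecond, 10*time.Minute); err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println("lock acquired")
		<-lock // release
	}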
	I0704 02:42:47.320022 1470916 start.go:96] Skipping create...Using existing machine configuration
	I0704 02:42:47.320032 1470916 fix.go:54] fixHost starting: 
	I0704 02:42:47.320318 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:47.336318 1470916 fix.go:112] recreateIfNeeded on embed-certs-430955: state=Stopped err=<nil>
	W0704 02:42:47.336351 1470916 fix.go:138] unexpected machine state, will restart: <nil>
	I0704 02:42:47.339157 1470916 out.go:177] * Restarting existing docker container for "embed-certs-430955" ...
	I0704 02:42:44.301588 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:46.302758 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:47.341788 1470916 cli_runner.go:164] Run: docker start embed-certs-430955
	I0704 02:42:47.648798 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:47.677988 1470916 kic.go:430] container "embed-certs-430955" state is running.
	I0704 02:42:47.678380 1470916 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" embed-certs-430955
	I0704 02:42:47.699694 1470916 profile.go:143] Saving config to /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/config.json ...
	I0704 02:42:47.700161 1470916 machine.go:94] provisionDockerMachine start ...
	I0704 02:42:47.700233 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:47.722767 1470916 main.go:141] libmachine: Using SSH client type: native
	I0704 02:42:47.723043 1470916 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34286 <nil> <nil>}
	I0704 02:42:47.723052 1470916 main.go:141] libmachine: About to run SSH command:
	hostname
	I0704 02:42:47.723914 1470916 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0704 02:42:50.874958 1470916 main.go:141] libmachine: SSH cmd err, output: <nil>: embed-certs-430955
	
	I0704 02:42:50.874983 1470916 ubuntu.go:169] provisioning hostname "embed-certs-430955"
	I0704 02:42:50.875048 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:50.892168 1470916 main.go:141] libmachine: Using SSH client type: native
	I0704 02:42:50.892415 1470916 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34286 <nil> <nil>}
	I0704 02:42:50.892477 1470916 main.go:141] libmachine: About to run SSH command:
	sudo hostname embed-certs-430955 && echo "embed-certs-430955" | sudo tee /etc/hostname
	I0704 02:42:51.048797 1470916 main.go:141] libmachine: SSH cmd err, output: <nil>: embed-certs-430955
	
	I0704 02:42:51.048913 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:51.068930 1470916 main.go:141] libmachine: Using SSH client type: native
	I0704 02:42:51.069188 1470916 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e2ba0] 0x3e5400 <nil>  [] 0s} 127.0.0.1 34286 <nil> <nil>}
	I0704 02:42:51.069217 1470916 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sembed-certs-430955' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 embed-certs-430955/g' /etc/hosts;
				else 
					echo '127.0.1.1 embed-certs-430955' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0704 02:42:51.211855 1470916 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0704 02:42:51.211882 1470916 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18859-1190282/.minikube CaCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18859-1190282/.minikube}
	I0704 02:42:51.211925 1470916 ubuntu.go:177] setting up certificates
	I0704 02:42:51.211935 1470916 provision.go:84] configureAuth start
	I0704 02:42:51.212005 1470916 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" embed-certs-430955
	I0704 02:42:51.228279 1470916 provision.go:143] copyHostCerts
	I0704 02:42:51.228350 1470916 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem, removing ...
	I0704 02:42:51.228369 1470916 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem
	I0704 02:42:51.228453 1470916 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.pem (1078 bytes)
	I0704 02:42:51.228559 1470916 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem, removing ...
	I0704 02:42:51.228570 1470916 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem
	I0704 02:42:51.228598 1470916 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/cert.pem (1123 bytes)
	I0704 02:42:51.228652 1470916 exec_runner.go:144] found /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem, removing ...
	I0704 02:42:51.228662 1470916 exec_runner.go:203] rm: /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem
	I0704 02:42:51.228687 1470916 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18859-1190282/.minikube/key.pem (1675 bytes)
	I0704 02:42:51.228743 1470916 provision.go:117] generating server cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem org=jenkins.embed-certs-430955 san=[127.0.0.1 192.168.85.2 embed-certs-430955 localhost minikube]
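The server cert is generated with the SANs listed above (127.0.0.1, 192.168.85.2, embed-certs-430955, localhost, minikube). A minimal Go sketch of that step using crypto/x509; self-signed here for brevity, whereas minikube signs with the profile's CA key:

	package main

	import (
		"crypto/rand"
		"crypto/rsa"
		"crypto/x509"
		"crypto/x509/pkix"
		"encoding/pem"
		"log"
		"math/big"
		"net"
		"os"
		"time"
	)

	func main() {
		key, err := rsa.GenerateKey(rand.Reader, 2048)
		if err != nil {
			log.Fatal(err)
		}
		tmpl := &x509.Certificate{
			SerialNumber: big.NewInt(1),
			Subject:      pkix.Name{Organization: []string{"jenkins.embed-certs-430955"}},
			NotBefore:    time.Now(),
			NotAfter:     time.Now().Add(26280 * time.Hour), // CertExpiration from the profile config
			// SANs exactly as listed in the provision.go line above.
			IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.85.2")},
			DNSNames:    []string{"embed-certs-430955", "localhost", "minikube"},
			KeyUsage:    x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
			ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		}
		// Self-signed for brevity: the template doubles as the parent.
		der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
		if err != nil {
			log.Fatal(err)
		}
		pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
	}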
	I0704 02:42:51.864365 1470916 provision.go:177] copyRemoteCerts
	I0704 02:42:51.864445 1470916 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0704 02:42:51.864490 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:51.880891 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:51.981047 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0704 02:42:52.012256 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0704 02:42:52.041400 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0704 02:42:52.067785 1470916 provision.go:87] duration metric: took 855.831788ms to configureAuth
	I0704 02:42:52.067824 1470916 ubuntu.go:193] setting minikube options for container-runtime
	I0704 02:42:52.068053 1470916 config.go:182] Loaded profile config "embed-certs-430955": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:42:52.068064 1470916 machine.go:97] duration metric: took 4.367890377s to provisionDockerMachine
	I0704 02:42:52.068074 1470916 start.go:293] postStartSetup for "embed-certs-430955" (driver="docker")
	I0704 02:42:52.068085 1470916 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0704 02:42:52.068144 1470916 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0704 02:42:52.068201 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:52.084877 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:48.303630 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:50.802172 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:52.802864 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:52.185495 1470916 ssh_runner.go:195] Run: cat /etc/os-release
	I0704 02:42:52.188857 1470916 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0704 02:42:52.188894 1470916 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0704 02:42:52.188905 1470916 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0704 02:42:52.188912 1470916 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0704 02:42:52.188922 1470916 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/addons for local assets ...
	I0704 02:42:52.189004 1470916 filesync.go:126] Scanning /home/jenkins/minikube-integration/18859-1190282/.minikube/files for local assets ...
	I0704 02:42:52.189085 1470916 filesync.go:149] local asset: /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem -> 11956882.pem in /etc/ssl/certs
	I0704 02:42:52.189198 1470916 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0704 02:42:52.198887 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem --> /etc/ssl/certs/11956882.pem (1708 bytes)
	I0704 02:42:52.224467 1470916 start.go:296] duration metric: took 156.376828ms for postStartSetup
	I0704 02:42:52.224551 1470916 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 02:42:52.224597 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:52.240892 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:52.336934 1470916 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0704 02:42:52.341814 1470916 fix.go:56] duration metric: took 5.021773399s for fixHost
	I0704 02:42:52.341840 1470916 start.go:83] releasing machines lock for "embed-certs-430955", held for 5.021829865s
	I0704 02:42:52.341919 1470916 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" embed-certs-430955
	I0704 02:42:52.357923 1470916 ssh_runner.go:195] Run: cat /version.json
	I0704 02:42:52.357973 1470916 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0704 02:42:52.357975 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:52.358023 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:52.375118 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:52.381436 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:52.599752 1470916 ssh_runner.go:195] Run: systemctl --version
	I0704 02:42:52.604496 1470916 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0704 02:42:52.608790 1470916 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0704 02:42:52.631386 1470916 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0704 02:42:52.631468 1470916 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0704 02:42:52.640664 1470916 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0704 02:42:52.640741 1470916 start.go:495] detecting cgroup driver to use...
	I0704 02:42:52.640803 1470916 detect.go:196] detected "cgroupfs" cgroup driver on host os
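detect.go reports "cgroupfs" here because that is what the docker daemon advertises (CgroupDriver:cgroupfs in the docker info dump above). A sketch of that idea in Go, not minikube's actual detect.go:

	package main

	import (
		"encoding/json"
		"fmt"
		"os/exec"
	)

	// detectCgroupDriver reads the CgroupDriver field from `docker system info`,
	// the value containerd and the kubelet are then configured to match.
	func detectCgroupDriver() (string, error) {
		out, err := exec.Command("docker", "system", "info", "--format", "{{json .}}").Output()
		if err != nil {
			return "", err
		}
		var info struct {
			CgroupDriver string `json:"CgroupDriver"`
		}
		if err := json.Unmarshal(out, &info); err != nil {
			return "", err
		}
		return info.CgroupDriver, nil
	}

	func main() {
		driver, err := detectCgroupDriver()
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println(driver) // cgroupfs on this host
	}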
	I0704 02:42:52.640882 1470916 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0704 02:42:52.655176 1470916 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0704 02:42:52.667554 1470916 docker.go:217] disabling cri-docker service (if available) ...
	I0704 02:42:52.667622 1470916 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0704 02:42:52.681119 1470916 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0704 02:42:52.692993 1470916 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0704 02:42:52.796329 1470916 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0704 02:42:52.894510 1470916 docker.go:233] disabling docker service ...
	I0704 02:42:52.894581 1470916 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0704 02:42:52.908029 1470916 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0704 02:42:52.919973 1470916 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0704 02:42:53.012128 1470916 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0704 02:42:53.112262 1470916 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0704 02:42:53.124477 1470916 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0704 02:42:53.142228 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.9"|' /etc/containerd/config.toml"
	I0704 02:42:53.152978 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0704 02:42:53.164013 1470916 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0704 02:42:53.164119 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0704 02:42:53.174670 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 02:42:53.184923 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0704 02:42:53.195691 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0704 02:42:53.206118 1470916 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0704 02:42:53.216104 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0704 02:42:53.226644 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0704 02:42:53.238773 1470916 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0704 02:42:53.249222 1470916 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0704 02:42:53.258142 1470916 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0704 02:42:53.266897 1470916 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:42:53.368156 1470916 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0704 02:42:53.518397 1470916 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0704 02:42:53.518470 1470916 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
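The stat call above implements the announced 60s wait for the containerd socket. A minimal Go sketch of the same wait loop; the 500ms poll interval is illustrative:

	package main

	import (
		"fmt"
		"os"
		"time"
	)

	// waitForSocket polls path until it exists or timeout elapses, mirroring
	// "Will wait 60s for socket path /run/containerd/containerd.sock".
	func waitForSocket(path string, timeout time.Duration) error {
		deadline := time.Now().Add(timeout)
		for time.Now().Before(deadline) {
			if _, err := os.Stat(path); err == nil {
				return nil
			}
			time.Sleep(500 * time.Millisecond)
		}
		return fmt.Errorf("timed out waiting for %s", path)
	}

	func main() {
		if err := waitForSocket("/run/containerd/containerd.sock", 60*time.Second); err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println("socket ready")
	}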
	I0704 02:42:53.526068 1470916 start.go:563] Will wait 60s for crictl version
	I0704 02:42:53.526132 1470916 ssh_runner.go:195] Run: which crictl
	I0704 02:42:53.531394 1470916 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0704 02:42:53.573181 1470916 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.18
	RuntimeApiVersion:  v1
	I0704 02:42:53.573319 1470916 ssh_runner.go:195] Run: containerd --version
	I0704 02:42:53.601314 1470916 ssh_runner.go:195] Run: containerd --version
	I0704 02:42:53.635172 1470916 out.go:177] * Preparing Kubernetes v1.30.2 on containerd 1.7.18 ...
	I0704 02:42:53.637866 1470916 cli_runner.go:164] Run: docker network inspect embed-certs-430955 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0704 02:42:53.654197 1470916 ssh_runner.go:195] Run: grep 192.168.85.1	host.minikube.internal$ /etc/hosts
	I0704 02:42:53.657972 1470916 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.85.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
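The bash one-liner above rewrites /etc/hosts idempotently: drop any stale host.minikube.internal line, then append the fresh mapping. The same idea as a Go sketch operating on a string; the real command writes via a temp file and sudo cp:

	package main

	import (
		"fmt"
		"strings"
	)

	// patchHosts drops any existing line ending in "\t<name>" and appends a
	// fresh "ip\tname" entry, matching the grep -v / echo pipeline above.
	func patchHosts(hosts, ip, name string) string {
		var kept []string
		for _, line := range strings.Split(strings.TrimRight(hosts, "\n"), "\n") {
			if !strings.HasSuffix(line, "\t"+name) {
				kept = append(kept, line)
			}
		}
		kept = append(kept, ip+"\t"+name)
		return strings.Join(kept, "\n") + "\n"
	}

	func main() {
		fmt.Print(patchHosts("127.0.0.1\tlocalhost\n", "192.168.85.1", "host.minikube.internal"))
	}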
	I0704 02:42:53.669279 1470916 kubeadm.go:877] updating cluster {Name:embed-certs-430955 KeepContext:false EmbedCerts:true MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:embed-certs-430955 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0704 02:42:53.669408 1470916 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 02:42:53.669474 1470916 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 02:42:53.707693 1470916 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 02:42:53.707716 1470916 containerd.go:534] Images already preloaded, skipping extraction
	I0704 02:42:53.707777 1470916 ssh_runner.go:195] Run: sudo crictl images --output json
	I0704 02:42:53.746834 1470916 containerd.go:627] all images are preloaded for containerd runtime.
	I0704 02:42:53.746859 1470916 cache_images.go:84] Images are preloaded, skipping loading
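"Images are preloaded" means every expected image already shows up in the `sudo crictl images --output json` listing above. A simplified Go sketch of that comparison; the struct follows the CRI JSON output, and the logic is illustrative rather than minikube's cache_images.go:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// imageList models the part of `crictl images --output json` the check needs.
	type imageList struct {
		Images []struct {
			RepoTags []string `json:"repoTags"`
		} `json:"images"`
	}

	// preloaded reports whether every expected image tag appears in the listing.
	func preloaded(crictlJSON []byte, expected []string) (bool, error) {
		var list imageList
		if err := json.Unmarshal(crictlJSON, &list); err != nil {
			return false, err
		}
		have := map[string]bool{}
		for _, img := range list.Images {
			for _, tag := range img.RepoTags {
				have[tag] = true
			}
		}
		for _, want := range expected {
			if !have[want] {
				return false, nil
			}
		}
		return true, nil
	}

	func main() {
		raw := []byte(`{"images":[{"repoTags":["registry.k8s.io/pause:3.9"]}]}`)
		ok, err := preloaded(raw, []string{"registry.k8s.io/pause:3.9"})
		fmt.Println(ok, err) // true <nil>
	}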
	I0704 02:42:53.746867 1470916 kubeadm.go:928] updating node { 192.168.85.2 8443 v1.30.2 containerd true true} ...
	I0704 02:42:53.746975 1470916 kubeadm.go:940] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.30.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=embed-certs-430955 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.85.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.30.2 ClusterName:embed-certs-430955 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0704 02:42:53.747043 1470916 ssh_runner.go:195] Run: sudo crictl info
	I0704 02:42:53.795042 1470916 cni.go:84] Creating CNI manager for ""
	I0704 02:42:53.795069 1470916 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 02:42:53.795085 1470916 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0704 02:42:53.795110 1470916 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.85.2 APIServerPort:8443 KubernetesVersion:v1.30.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:embed-certs-430955 NodeName:embed-certs-430955 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.85.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.85.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0704 02:42:53.795254 1470916 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.85.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "embed-certs-430955"
	  kubeletExtraArgs:
	    node-ip: 192.168.85.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.85.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.30.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0704 02:42:53.795335 1470916 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.30.2
	I0704 02:42:53.805310 1470916 binaries.go:44] Found k8s binaries, skipping transfer
	I0704 02:42:53.805451 1470916 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0704 02:42:53.814449 1470916 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (322 bytes)
	I0704 02:42:53.834014 1470916 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0704 02:42:53.855190 1470916 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2172 bytes)
	I0704 02:42:53.874307 1470916 ssh_runner.go:195] Run: grep 192.168.85.2	control-plane.minikube.internal$ /etc/hosts
	I0704 02:42:53.879176 1470916 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.85.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0704 02:42:53.890054 1470916 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:42:53.986529 1470916 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 02:42:54.005313 1470916 certs.go:68] Setting up /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955 for IP: 192.168.85.2
	I0704 02:42:54.005347 1470916 certs.go:194] generating shared ca certs ...
	I0704 02:42:54.005368 1470916 certs.go:226] acquiring lock for ca certs: {Name:mk4f0dbc18506f7ee4fcbc10f124348dd208ffc0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:42:54.005599 1470916 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key
	I0704 02:42:54.005680 1470916 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key
	I0704 02:42:54.005697 1470916 certs.go:256] generating profile certs ...
	I0704 02:42:54.005833 1470916 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/client.key
	I0704 02:42:54.005954 1470916 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/apiserver.key.6f8e09d6
	I0704 02:42:54.006006 1470916 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/proxy-client.key
	I0704 02:42:54.006135 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688.pem (1338 bytes)
	W0704 02:42:54.006169 1470916 certs.go:480] ignoring /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688_empty.pem, impossibly tiny 0 bytes
	I0704 02:42:54.006178 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca-key.pem (1679 bytes)
	I0704 02:42:54.006203 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/ca.pem (1078 bytes)
	I0704 02:42:54.006230 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/cert.pem (1123 bytes)
	I0704 02:42:54.006253 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/key.pem (1675 bytes)
	I0704 02:42:54.006328 1470916 certs.go:484] found cert: /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem (1708 bytes)
	I0704 02:42:54.007071 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0704 02:42:54.038269 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1671 bytes)
	I0704 02:42:54.068249 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0704 02:42:54.098798 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0704 02:42:54.153319 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1428 bytes)
	I0704 02:42:54.182789 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0704 02:42:54.222221 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0704 02:42:54.254594 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/embed-certs-430955/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0704 02:42:54.283395 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0704 02:42:54.318493 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/certs/1195688.pem --> /usr/share/ca-certificates/1195688.pem (1338 bytes)
	I0704 02:42:54.346697 1470916 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/ssl/certs/11956882.pem --> /usr/share/ca-certificates/11956882.pem (1708 bytes)
	I0704 02:42:54.372055 1470916 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0704 02:42:54.391008 1470916 ssh_runner.go:195] Run: openssl version
	I0704 02:42:54.399057 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/11956882.pem && ln -fs /usr/share/ca-certificates/11956882.pem /etc/ssl/certs/11956882.pem"
	I0704 02:42:54.408710 1470916 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/11956882.pem
	I0704 02:42:54.412506 1470916 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Jul  4 01:49 /usr/share/ca-certificates/11956882.pem
	I0704 02:42:54.412575 1470916 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/11956882.pem
	I0704 02:42:54.419893 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/11956882.pem /etc/ssl/certs/3ec20f2e.0"
	I0704 02:42:54.429087 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0704 02:42:54.438323 1470916 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:42:54.441993 1470916 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Jul  4 01:08 /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:42:54.442062 1470916 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0704 02:42:54.449349 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0704 02:42:54.458459 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/1195688.pem && ln -fs /usr/share/ca-certificates/1195688.pem /etc/ssl/certs/1195688.pem"
	I0704 02:42:54.467882 1470916 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/1195688.pem
	I0704 02:42:54.471426 1470916 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Jul  4 01:49 /usr/share/ca-certificates/1195688.pem
	I0704 02:42:54.471581 1470916 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/1195688.pem
	I0704 02:42:54.478767 1470916 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/1195688.pem /etc/ssl/certs/51391683.0"
	I0704 02:42:54.488060 1470916 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0704 02:42:54.491772 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0704 02:42:54.499014 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0704 02:42:54.506334 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0704 02:42:54.513505 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0704 02:42:54.520613 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0704 02:42:54.527779 1470916 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
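Each openssl `-checkend 86400` call above asks whether a certificate expires within 24 hours. The equivalent check as a Go sketch with crypto/x509; the path in main is one of those from the log:

	package main

	import (
		"crypto/x509"
		"encoding/pem"
		"fmt"
		"os"
		"time"
	)

	// checkend reports whether the certificate at path expires within d,
	// mirroring `openssl x509 -noout -checkend 86400`.
	func checkend(path string, d time.Duration) (bool, error) {
		data, err := os.ReadFile(path)
		if err != nil {
			return false, err
		}
		block, _ := pem.Decode(data)
		if block == nil {
			return false, fmt.Errorf("%s: no PEM block found", path)
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			return false, err
		}
		return time.Now().Add(d).After(cert.NotAfter), nil
	}

	func main() {
		expiring, err := checkend("/var/lib/minikube/certs/apiserver-kubelet-client.crt", 24*time.Hour)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println("expires within 24h:", expiring)
	}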
	I0704 02:42:54.534526 1470916 kubeadm.go:391] StartCluster: {Name:embed-certs-430955 KeepContext:false EmbedCerts:true MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:embed-certs-430955 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true metrics-server:true storage-provisioner:true] CustomAddonImages:map[MetricsScraper:registry.k8s.io/echoserver:1.4 MetricsServer:registry.k8s.io/echoserver:1.4] CustomAddonRegistries:map[MetricsServer:fake.domain] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 02:42:54.534621 1470916 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0704 02:42:54.534690 1470916 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0704 02:42:54.587248 1470916 cri.go:89] found id: "5dcba7e90225bc8ca557e17dff76a9f2abcad582a822caeee2c8581eddbb717a"
	I0704 02:42:54.587283 1470916 cri.go:89] found id: "33793123b42c0ebf992013fbc13c32f9abe7c7bb3dc071d146b96c022a136350"
	I0704 02:42:54.587289 1470916 cri.go:89] found id: "535ee05b0ef0629e661af2eb5941310c574516563ead96b0dbbc2a3ecd83b5b0"
	I0704 02:42:54.587293 1470916 cri.go:89] found id: "05085f5cc1b32c12f0262cfd81c404fda9c364374a62080e5f35af9a85c76702"
	I0704 02:42:54.587315 1470916 cri.go:89] found id: "29ca946fcdfe61ab27ee2a0d1766c963ad9322e7934ebbb25e404aa051f83288"
	I0704 02:42:54.587326 1470916 cri.go:89] found id: "9c41a86a48e129140f09ff3b54f8a776e0b2c7c92494fd5bd537395bc1cacccf"
	I0704 02:42:54.587329 1470916 cri.go:89] found id: "eee96c8cb4b9d3b202c7b410247857f5646821d729dc4b6f6ce968b5c73cdc54"
	I0704 02:42:54.587340 1470916 cri.go:89] found id: "73a22566da6b7c99d835e9b793ae1b7ad4b9107f9712df5da009cb5cc4c97e3e"
	I0704 02:42:54.587348 1470916 cri.go:89] found id: ""
	I0704 02:42:54.587425 1470916 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0704 02:42:54.600107 1470916 cri.go:116] JSON = null
	W0704 02:42:54.600156 1470916 kubeadm.go:398] unpause failed: list paused: list returned 0 containers, but ps returned 8
	I0704 02:42:54.600223 1470916 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	W0704 02:42:54.608930 1470916 kubeadm.go:404] apiserver tunnel failed: apiserver port not set
	I0704 02:42:54.608953 1470916 kubeadm.go:407] found existing configuration files, will attempt cluster restart
	I0704 02:42:54.608967 1470916 kubeadm.go:587] restartPrimaryControlPlane start ...
	I0704 02:42:54.609019 1470916 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0704 02:42:54.617585 1470916 kubeadm.go:129] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0704 02:42:54.618191 1470916 kubeconfig.go:47] verify endpoint returned: get endpoint: "embed-certs-430955" does not appear in /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:42:54.618458 1470916 kubeconfig.go:62] /home/jenkins/minikube-integration/18859-1190282/kubeconfig needs updating (will repair): [kubeconfig missing "embed-certs-430955" cluster setting kubeconfig missing "embed-certs-430955" context setting]
	I0704 02:42:54.618937 1470916 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:42:54.620518 1470916 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0704 02:42:54.631680 1470916 kubeadm.go:624] The running cluster does not require reconfiguration: 192.168.85.2
	I0704 02:42:54.631727 1470916 kubeadm.go:591] duration metric: took 22.753772ms to restartPrimaryControlPlane
	I0704 02:42:54.631738 1470916 kubeadm.go:393] duration metric: took 97.224129ms to StartCluster
	I0704 02:42:54.631755 1470916 settings.go:142] acquiring lock: {Name:mk6d49b718ddc65478a80e50434df6064c31eee4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:42:54.631831 1470916 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:42:54.633259 1470916 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18859-1190282/kubeconfig: {Name:mkcb1dc68318dea0090dbb67854ab85e2d8d0252 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0704 02:42:54.633493 1470916 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.85.2 Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0704 02:42:54.633777 1470916 config.go:182] Loaded profile config "embed-certs-430955": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:42:54.633814 1470916 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:true default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0704 02:42:54.633870 1470916 addons.go:69] Setting storage-provisioner=true in profile "embed-certs-430955"
	I0704 02:42:54.633890 1470916 addons.go:234] Setting addon storage-provisioner=true in "embed-certs-430955"
	W0704 02:42:54.633901 1470916 addons.go:243] addon storage-provisioner should already be in state true
	I0704 02:42:54.633922 1470916 host.go:66] Checking if "embed-certs-430955" exists ...
	I0704 02:42:54.634563 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:54.635046 1470916 addons.go:69] Setting dashboard=true in profile "embed-certs-430955"
	I0704 02:42:54.635134 1470916 addons.go:234] Setting addon dashboard=true in "embed-certs-430955"
	W0704 02:42:54.635166 1470916 addons.go:243] addon dashboard should already be in state true
	I0704 02:42:54.635231 1470916 host.go:66] Checking if "embed-certs-430955" exists ...
	I0704 02:42:54.636204 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:54.638034 1470916 addons.go:69] Setting default-storageclass=true in profile "embed-certs-430955"
	I0704 02:42:54.638077 1470916 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "embed-certs-430955"
	I0704 02:42:54.638362 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:54.638965 1470916 out.go:177] * Verifying Kubernetes components...
	I0704 02:42:54.639149 1470916 addons.go:69] Setting metrics-server=true in profile "embed-certs-430955"
	I0704 02:42:54.639193 1470916 addons.go:234] Setting addon metrics-server=true in "embed-certs-430955"
	W0704 02:42:54.639229 1470916 addons.go:243] addon metrics-server should already be in state true
	I0704 02:42:54.639276 1470916 host.go:66] Checking if "embed-certs-430955" exists ...
	I0704 02:42:54.640991 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:54.641315 1470916 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0704 02:42:54.690457 1470916 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0704 02:42:54.697391 1470916 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:42:54.697413 1470916 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0704 02:42:54.697478 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:54.719759 1470916 addons.go:234] Setting addon default-storageclass=true in "embed-certs-430955"
	W0704 02:42:54.719793 1470916 addons.go:243] addon default-storageclass should already be in state true
	I0704 02:42:54.719823 1470916 host.go:66] Checking if "embed-certs-430955" exists ...
	I0704 02:42:54.720271 1470916 cli_runner.go:164] Run: docker container inspect embed-certs-430955 --format={{.State.Status}}
	I0704 02:42:54.727522 1470916 out.go:177]   - Using image docker.io/kubernetesui/dashboard:v2.7.0
	I0704 02:42:54.731017 1470916 out.go:177]   - Using image registry.k8s.io/echoserver:1.4
	I0704 02:42:54.739592 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-ns.yaml
	I0704 02:42:54.739625 1470916 ssh_runner.go:362] scp dashboard/dashboard-ns.yaml --> /etc/kubernetes/addons/dashboard-ns.yaml (759 bytes)
	I0704 02:42:54.739702 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:54.755786 1470916 out.go:177]   - Using image fake.domain/registry.k8s.io/echoserver:1.4
	I0704 02:42:54.758337 1470916 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0704 02:42:54.758362 1470916 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0704 02:42:54.758436 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:54.765388 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:54.775653 1470916 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0704 02:42:54.775689 1470916 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0704 02:42:54.775758 1470916 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" embed-certs-430955
	I0704 02:42:54.816303 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:54.817018 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
	I0704 02:42:54.830072 1470916 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34286 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa Username:docker}
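The four sshutil clients above all reuse one mapped SSH port (34286) to push addon manifests onto the node, and the "scp memory -->" lines stream manifest bytes straight from memory rather than from a file on disk. A rough equivalent using the system ssh binary and sudo tee — minikube's sshutil has its own SSH client, so the command shape here is only an approximation, and the manifest bytes are a stand-in:

	package main

	import (
		"bytes"
		"fmt"
		"os/exec"
	)

	func main() {
		manifest := []byte("apiVersion: v1\nkind: ServiceAccount\n") // hypothetical payload
		// Stream the bytes over SSH into the target path; no local temp file
		// is needed. Port, key path and user are copied from the sshutil
		// lines above; tee runs under sudo because /etc/kubernetes is root-owned.
		cmd := exec.Command("ssh",
			"-p", "34286",
			"-i", "/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/embed-certs-430955/id_rsa",
			"docker@127.0.0.1",
			"sudo tee /etc/kubernetes/addons/storage-provisioner.yaml >/dev/null")
		cmd.Stdin = bytes.NewReader(manifest)
		if out, err := cmd.CombinedOutput(); err != nil {
			fmt.Printf("copy failed: %v: %s\n", err, out)
		}
	}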
	I0704 02:42:54.897258 1470916 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0704 02:42:54.958394 1470916 node_ready.go:35] waiting up to 6m0s for node "embed-certs-430955" to be "Ready" ...
	I0704 02:42:55.116296 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrole.yaml
	I0704 02:42:55.116324 1470916 ssh_runner.go:362] scp dashboard/dashboard-clusterrole.yaml --> /etc/kubernetes/addons/dashboard-clusterrole.yaml (1001 bytes)
	I0704 02:42:55.172079 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:42:55.248692 1470916 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0704 02:42:55.248712 1470916 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1825 bytes)
	I0704 02:42:55.271421 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml
	I0704 02:42:55.271518 1470916 ssh_runner.go:362] scp dashboard/dashboard-clusterrolebinding.yaml --> /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml (1018 bytes)
	I0704 02:42:55.282073 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:42:55.346040 1470916 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0704 02:42:55.346110 1470916 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0704 02:42:55.396209 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-configmap.yaml
	I0704 02:42:55.396296 1470916 ssh_runner.go:362] scp dashboard/dashboard-configmap.yaml --> /etc/kubernetes/addons/dashboard-configmap.yaml (837 bytes)
	I0704 02:42:55.463752 1470916 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0704 02:42:55.463823 1470916 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	W0704 02:42:55.491361 1470916 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0704 02:42:55.491396 1470916 retry.go:31] will retry after 335.192891ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0704 02:42:55.498857 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-dp.yaml
	I0704 02:42:55.498889 1470916 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/dashboard-dp.yaml (4201 bytes)
	I0704 02:42:55.620918 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	W0704 02:42:55.661047 1470916 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
	I0704 02:42:55.661078 1470916 retry.go:31] will retry after 286.910643ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
	stdout:
	
	stderr:
	error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
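Both apply failures above are transient: the apiserver on localhost:8443 is still coming back up, so retry.go schedules another attempt after a randomized sub-second delay, and the later log lines show the applies being reissued with --force. A minimal sketch of that retry loop; applyWithRetry is an illustrative name, and the fixed delay and attempt cap are assumptions, not minikube's actual retry policy:

	package main

	import (
		"fmt"
		"os/exec"
		"time"
	)

	// applyWithRetry reissues a kubectl apply while the apiserver is still
	// refusing connections, sleeping between attempts. The real code uses
	// randomized delays and eventually falls back to `apply --force`.
	func applyWithRetry(kubectl, manifest string, attempts int, delay time.Duration) error {
		var lastErr error
		for i := 0; i < attempts; i++ {
			out, err := exec.Command(kubectl, "apply", "-f", manifest).CombinedOutput()
			if err == nil {
				return nil
			}
			lastErr = fmt.Errorf("apply %s: %v: %s", manifest, err, out)
			time.Sleep(delay)
		}
		return lastErr
	}

	func main() {
		err := applyWithRetry("kubectl", "/etc/kubernetes/addons/storageclass.yaml",
			5, 350*time.Millisecond)
		if err != nil {
			fmt.Println(err)
		}
	}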
	I0704 02:42:55.736879 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-role.yaml
	I0704 02:42:55.736912 1470916 ssh_runner.go:362] scp dashboard/dashboard-role.yaml --> /etc/kubernetes/addons/dashboard-role.yaml (1724 bytes)
	I0704 02:42:55.827526 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0704 02:42:55.881789 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-rolebinding.yaml
	I0704 02:42:55.881829 1470916 ssh_runner.go:362] scp dashboard/dashboard-rolebinding.yaml --> /etc/kubernetes/addons/dashboard-rolebinding.yaml (1046 bytes)
	I0704 02:42:55.948584 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
	I0704 02:42:56.012952 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-sa.yaml
	I0704 02:42:56.012977 1470916 ssh_runner.go:362] scp dashboard/dashboard-sa.yaml --> /etc/kubernetes/addons/dashboard-sa.yaml (837 bytes)
	I0704 02:42:56.178215 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-secret.yaml
	I0704 02:42:56.178279 1470916 ssh_runner.go:362] scp dashboard/dashboard-secret.yaml --> /etc/kubernetes/addons/dashboard-secret.yaml (1389 bytes)
	I0704 02:42:56.231086 1470916 addons.go:431] installing /etc/kubernetes/addons/dashboard-svc.yaml
	I0704 02:42:56.231119 1470916 ssh_runner.go:362] scp dashboard/dashboard-svc.yaml --> /etc/kubernetes/addons/dashboard-svc.yaml (1294 bytes)
	I0704 02:42:56.265698 1470916 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
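All ten dashboard manifests above go to the apiserver in one kubectl invocation rather than one apply per file. A short sketch of building that multi-"-f" command; the file names are copied from the command above, the rest is illustrative:

	package main

	import (
		"fmt"
		"os/exec"
	)

	func main() {
		// File names copied from the dashboard apply command above.
		files := []string{
			"dashboard-ns.yaml", "dashboard-clusterrole.yaml",
			"dashboard-clusterrolebinding.yaml", "dashboard-configmap.yaml",
			"dashboard-dp.yaml", "dashboard-role.yaml",
			"dashboard-rolebinding.yaml", "dashboard-sa.yaml",
			"dashboard-secret.yaml", "dashboard-svc.yaml",
		}
		args := []string{"apply"}
		for _, f := range files {
			args = append(args, "-f", "/etc/kubernetes/addons/"+f)
		}
		// A single apply for all ten manifests saves nine apiserver
		// round-trips and keeps the addon rollout close to atomic.
		out, err := exec.Command("kubectl", args...).CombinedOutput()
		fmt.Print(string(out))
		if err != nil {
			fmt.Println("apply failed:", err)
		}
	}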
	I0704 02:42:54.918811 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:42:57.302656 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:00.593312 1470916 node_ready.go:49] node "embed-certs-430955" has status "Ready":"True"
	I0704 02:43:00.593335 1470916 node_ready.go:38] duration metric: took 5.634899675s for node "embed-certs-430955" to be "Ready" ...
	I0704 02:43:00.593345 1470916 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0704 02:43:00.620693 1470916 pod_ready.go:78] waiting up to 6m0s for pod "coredns-7db6d8ff4d-v5z9t" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.642967 1470916 pod_ready.go:92] pod "coredns-7db6d8ff4d-v5z9t" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:00.643042 1470916 pod_ready.go:81] duration metric: took 22.274251ms for pod "coredns-7db6d8ff4d-v5z9t" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.643069 1470916 pod_ready.go:78] waiting up to 6m0s for pod "etcd-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.654451 1470916 pod_ready.go:92] pod "etcd-embed-certs-430955" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:00.654525 1470916 pod_ready.go:81] duration metric: took 11.434932ms for pod "etcd-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.654554 1470916 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.667389 1470916 pod_ready.go:92] pod "kube-apiserver-embed-certs-430955" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:00.667472 1470916 pod_ready.go:81] duration metric: took 12.885394ms for pod "kube-apiserver-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.667567 1470916 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.678509 1470916 pod_ready.go:92] pod "kube-controller-manager-embed-certs-430955" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:00.678579 1470916 pod_ready.go:81] duration metric: took 10.989896ms for pod "kube-controller-manager-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.678618 1470916 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-j2444" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.800386 1470916 pod_ready.go:92] pod "kube-proxy-j2444" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:00.800412 1470916 pod_ready.go:81] duration metric: took 121.769722ms for pod "kube-proxy-j2444" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:00.800422 1470916 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
	I0704 02:43:01.200426 1470916 pod_ready.go:92] pod "kube-scheduler-embed-certs-430955" in "kube-system" namespace has status "Ready":"True"
	I0704 02:43:01.200453 1470916 pod_ready.go:81] duration metric: took 400.022412ms for pod "kube-scheduler-embed-certs-430955" in "kube-system" namespace to be "Ready" ...
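Each pod_ready wait above polls one system pod until its Ready condition reports True, with a 6m per-pod budget. A minimal client-go sketch of the same condition check; waitPodReady is an illustrative name, minikube's pod_ready.go differs in detail, and the kubeconfig path is taken from the apply commands above:

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// waitPodReady polls until the pod's Ready condition is True, tolerating
	// transient Get errors while the apiserver is still restarting.
	func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
		return wait.PollUntilContextTimeout(ctx, 2*time.Second, timeout, true,
			func(ctx context.Context) (bool, error) {
				pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
				if err != nil {
					return false, nil
				}
				for _, c := range pod.Status.Conditions {
					if c.Type == corev1.PodReady {
						return c.Status == corev1.ConditionTrue, nil
					}
				}
				return false, nil
			})
	}

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
		if err != nil {
			panic(err)
		}
		cs := kubernetes.NewForConfigOrDie(cfg)
		// Same 6m budget the waits above use.
		if err := waitPodReady(context.Background(), cs, "kube-system", "etcd-embed-certs-430955", 6*time.Minute); err != nil {
			fmt.Println("not ready:", err)
		}
	}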
	I0704 02:43:01.200466 1470916 pod_ready.go:78] waiting up to 6m0s for pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace to be "Ready" ...
	I0704 02:42:59.803098 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:01.803611 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.217727 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.642638 1470916 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.021673361s)
	I0704 02:43:03.642673 1470916 addons.go:475] Verifying addon metrics-server=true in "embed-certs-430955"
	I0704 02:43:03.789172 1470916 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (7.840546408s)
	I0704 02:43:03.789966 1470916 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (7.962397269s)
	I0704 02:43:03.920575 1470916 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.30.2/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (7.654822874s)
	I0704 02:43:03.923554 1470916 out.go:177] * Some dashboard features require the metrics-server addon. To enable all features please run:
	
		minikube -p embed-certs-430955 addons enable metrics-server
	
	I0704 02:43:03.926200 1470916 out.go:177] * Enabled addons: metrics-server, storage-provisioner, default-storageclass, dashboard
	I0704 02:43:03.928813 1470916 addons.go:510] duration metric: took 9.294992847s for enable addons: enabled=[metrics-server storage-provisioner default-storageclass dashboard]
	I0704 02:43:05.706346 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.805495 1461358 pod_ready.go:102] pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:03.805577 1461358 pod_ready.go:81] duration metric: took 4m0.009979608s for pod "metrics-server-9975d5f86-hvdg7" in "kube-system" namespace to be "Ready" ...
	E0704 02:43:03.805602 1461358 pod_ready.go:66] WaitExtra: waitPodCondition: context deadline exceeded
	I0704 02:43:03.805624 1461358 pod_ready.go:38] duration metric: took 5m21.812430295s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0704 02:43:03.805672 1461358 api_server.go:52] waiting for apiserver process to appear ...
	I0704 02:43:03.805728 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-apiserver Namespaces:[]}
	I0704 02:43:03.805834 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0704 02:43:03.864499 1461358 cri.go:89] found id: "60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:03.864518 1461358 cri.go:89] found id: "186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:03.864523 1461358 cri.go:89] found id: ""
	I0704 02:43:03.864530 1461358 logs.go:276] 2 containers: [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1]
	I0704 02:43:03.864588 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.868307 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.872242 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:etcd Namespaces:[]}
	I0704 02:43:03.872311 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0704 02:43:03.939796 1461358 cri.go:89] found id: "8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:03.939864 1461358 cri.go:89] found id: "1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:03.939900 1461358 cri.go:89] found id: ""
	I0704 02:43:03.939925 1461358 logs.go:276] 2 containers: [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39]
	I0704 02:43:03.940009 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.944079 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.948192 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:coredns Namespaces:[]}
	I0704 02:43:03.948273 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0704 02:43:03.990309 1461358 cri.go:89] found id: "58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:03.990331 1461358 cri.go:89] found id: "71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:03.990336 1461358 cri.go:89] found id: ""
	I0704 02:43:03.990344 1461358 logs.go:276] 2 containers: [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f]
	I0704 02:43:03.990402 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.995011 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:03.998649 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-scheduler Namespaces:[]}
	I0704 02:43:03.998723 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0704 02:43:04.050731 1461358 cri.go:89] found id: "50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:04.050754 1461358 cri.go:89] found id: "c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:04.050760 1461358 cri.go:89] found id: ""
	I0704 02:43:04.050768 1461358 logs.go:276] 2 containers: [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5]
	I0704 02:43:04.050847 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.054840 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.058996 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-proxy Namespaces:[]}
	I0704 02:43:04.059074 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0704 02:43:04.110393 1461358 cri.go:89] found id: "c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:04.110418 1461358 cri.go:89] found id: "e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:04.110423 1461358 cri.go:89] found id: ""
	I0704 02:43:04.110431 1461358 logs.go:276] 2 containers: [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8]
	I0704 02:43:04.110489 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.115777 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.119173 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-controller-manager Namespaces:[]}
	I0704 02:43:04.119258 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0704 02:43:04.162339 1461358 cri.go:89] found id: "83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:04.162362 1461358 cri.go:89] found id: "c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:04.162367 1461358 cri.go:89] found id: ""
	I0704 02:43:04.162374 1461358 logs.go:276] 2 containers: [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222]
	I0704 02:43:04.162433 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.166162 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.169642 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kindnet Namespaces:[]}
	I0704 02:43:04.169732 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0704 02:43:04.219361 1461358 cri.go:89] found id: "e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:04.219379 1461358 cri.go:89] found id: "ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:04.219384 1461358 cri.go:89] found id: ""
	I0704 02:43:04.219391 1461358 logs.go:276] 2 containers: [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f]
	I0704 02:43:04.219446 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.223078 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.226540 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kubernetes-dashboard Namespaces:[]}
	I0704 02:43:04.226613 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kubernetes-dashboard
	I0704 02:43:04.311937 1461358 cri.go:89] found id: "11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:04.311999 1461358 cri.go:89] found id: ""
	I0704 02:43:04.312014 1461358 logs.go:276] 1 containers: [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c]
	I0704 02:43:04.312084 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.315775 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:storage-provisioner Namespaces:[]}
	I0704 02:43:04.315884 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=storage-provisioner
	I0704 02:43:04.358152 1461358 cri.go:89] found id: "a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:04.358175 1461358 cri.go:89] found id: "946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:04.358180 1461358 cri.go:89] found id: ""
	I0704 02:43:04.358187 1461358 logs.go:276] 2 containers: [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a]
	I0704 02:43:04.358244 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.362325 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:04.365858 1461358 logs.go:123] Gathering logs for kindnet [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e] ...
	I0704 02:43:04.365883 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:04.406466 1461358 logs.go:123] Gathering logs for kube-proxy [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f] ...
	I0704 02:43:04.406496 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:04.446192 1461358 logs.go:123] Gathering logs for kube-controller-manager [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7] ...
	I0704 02:43:04.446222 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:04.501463 1461358 logs.go:123] Gathering logs for kube-controller-manager [c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222] ...
	I0704 02:43:04.501496 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:04.573576 1461358 logs.go:123] Gathering logs for kubernetes-dashboard [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c] ...
	I0704 02:43:04.573616 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:04.623005 1461358 logs.go:123] Gathering logs for storage-provisioner [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2] ...
	I0704 02:43:04.623033 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:04.665925 1461358 logs.go:123] Gathering logs for container status ...
	I0704 02:43:04.665953 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
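The container-status probe above guards against a missing crictl twice over: it substitutes "which crictl || echo crictl", and if the whole crictl invocation fails it falls back to "docker ps -a". A thin Go wrapper around that exact shell line, assuming bash is available on the host:

	package main

	import (
		"fmt"
		"os/exec"
	)

	// containerStatus mirrors the fallback seen above: prefer crictl when it
	// is on PATH, otherwise fall back to `docker ps -a`. The command string
	// is copied from the log line; error handling is simplified.
	func containerStatus() (string, error) {
		cmd := exec.Command("/bin/bash", "-c",
			"sudo `which crictl || echo crictl` ps -a || sudo docker ps -a")
		out, err := cmd.CombinedOutput()
		return string(out), err
	}

	func main() {
		out, err := containerStatus()
		if err != nil {
			fmt.Println("fallback also failed:", err)
		}
		fmt.Print(out)
	}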
	I0704 02:43:04.715865 1461358 logs.go:123] Gathering logs for kube-apiserver [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d] ...
	I0704 02:43:04.715897 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:04.781858 1461358 logs.go:123] Gathering logs for coredns [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09] ...
	I0704 02:43:04.781896 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:04.827261 1461358 logs.go:123] Gathering logs for kube-apiserver [186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1] ...
	I0704 02:43:04.827297 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:04.884061 1461358 logs.go:123] Gathering logs for etcd [1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39] ...
	I0704 02:43:04.884099 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:04.956000 1461358 logs.go:123] Gathering logs for kubelet ...
	I0704 02:43:04.956029 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0704 02:43:05.020230 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.141075     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.020461 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.711599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.023925 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.532033     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.024354 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.584292     660 reflector.go:138] object-"kubernetes-dashboard"/"kubernetes-dashboard-token-hbf2h": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets "kubernetes-dashboard-token-hbf2h" is forbidden: User "system:node:old-k8s-version-610521" cannot list resource "secrets" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'old-k8s-version-610521' and this object
	W0704 02:43:05.026261 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:05 old-k8s-version-610521 kubelet[660]: E0704 02:38:05.812067     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.026595 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:06 old-k8s-version-610521 kubelet[660]: E0704 02:38:06.816842     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.026958 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:07 old-k8s-version-610521 kubelet[660]: E0704 02:38:07.834643     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.027151 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:09 old-k8s-version-610521 kubelet[660]: E0704 02:38:09.521695     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.027992 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:14 old-k8s-version-610521 kubelet[660]: E0704 02:38:14.843471     660 pod_workers.go:191] Error syncing pod c0dd39a9-a4fa-4097-8061-f4d356bedb93 ("storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"), skipping: failed to "StartContainer" for "storage-provisioner" with CrashLoopBackOff: "back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"
	W0704 02:43:05.028592 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:19 old-k8s-version-610521 kubelet[660]: E0704 02:38:19.869765     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.031051 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:21 old-k8s-version-610521 kubelet[660]: E0704 02:38:21.530110     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.031888 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:26 old-k8s-version-610521 kubelet[660]: E0704 02:38:26.907175     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.032076 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:35 old-k8s-version-610521 kubelet[660]: E0704 02:38:35.521777     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.032408 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:39 old-k8s-version-610521 kubelet[660]: E0704 02:38:39.521443     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.032600 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:50 old-k8s-version-610521 kubelet[660]: E0704 02:38:50.534167     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.033201 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:53 old-k8s-version-610521 kubelet[660]: E0704 02:38:53.957826     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.033530 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:56 old-k8s-version-610521 kubelet[660]: E0704 02:38:56.906952     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.033751 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:01 old-k8s-version-610521 kubelet[660]: E0704 02:39:01.521480     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.034093 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:09 old-k8s-version-610521 kubelet[660]: E0704 02:39:09.529008     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.036785 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:13 old-k8s-version-610521 kubelet[660]: E0704 02:39:13.533305     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.037129 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:22 old-k8s-version-610521 kubelet[660]: E0704 02:39:22.524726     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.037319 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:25 old-k8s-version-610521 kubelet[660]: E0704 02:39:25.521615     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.037910 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:37 old-k8s-version-610521 kubelet[660]: E0704 02:39:37.096421     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.038096 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:38 old-k8s-version-610521 kubelet[660]: E0704 02:39:38.521726     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.038425 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:46 old-k8s-version-610521 kubelet[660]: E0704 02:39:46.906948     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.038609 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:53 old-k8s-version-610521 kubelet[660]: E0704 02:39:53.521738     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.038950 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:58 old-k8s-version-610521 kubelet[660]: E0704 02:39:58.521030     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.039136 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:06 old-k8s-version-610521 kubelet[660]: E0704 02:40:06.525857     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.039470 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:12 old-k8s-version-610521 kubelet[660]: E0704 02:40:12.520985     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.039670 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:19 old-k8s-version-610521 kubelet[660]: E0704 02:40:19.521316     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.040059 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:23 old-k8s-version-610521 kubelet[660]: E0704 02:40:23.521554     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.040255 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:32 old-k8s-version-610521 kubelet[660]: E0704 02:40:32.521292     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.040595 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:38 old-k8s-version-610521 kubelet[660]: E0704 02:40:38.521059     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.043060 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:44 old-k8s-version-610521 kubelet[660]: E0704 02:40:44.529636     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:05.043410 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:53 old-k8s-version-610521 kubelet[660]: E0704 02:40:53.523653     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.043603 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:57 old-k8s-version-610521 kubelet[660]: E0704 02:40:57.521996     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.044200 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:07 old-k8s-version-610521 kubelet[660]: E0704 02:41:07.423946     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.044391 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:09 old-k8s-version-610521 kubelet[660]: E0704 02:41:09.521565     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.044726 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:16 old-k8s-version-610521 kubelet[660]: E0704 02:41:16.907385     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.044912 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:21 old-k8s-version-610521 kubelet[660]: E0704 02:41:21.527466     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.045240 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:31 old-k8s-version-610521 kubelet[660]: E0704 02:41:31.524981     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.045425 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:32 old-k8s-version-610521 kubelet[660]: E0704 02:41:32.521592     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.045754 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.521996     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.045938 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.522125     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046123 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.521344     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046456 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.522153     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.046772 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.521551     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.046977 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.522234     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.047310 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521701     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.047501 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521892     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.047688 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:33 old-k8s-version-610521 kubelet[660]: E0704 02:42:33.521599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.048022 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.048343 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.048573 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.048764 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.049095 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:05.049108 1461358 logs.go:123] Gathering logs for describe nodes ...
	I0704 02:43:05.049123 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0704 02:43:05.222743 1461358 logs.go:123] Gathering logs for coredns [71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f] ...
	I0704 02:43:05.222778 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:05.264679 1461358 logs.go:123] Gathering logs for kube-scheduler [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c] ...
	I0704 02:43:05.264706 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:05.303780 1461358 logs.go:123] Gathering logs for kube-scheduler [c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5] ...
	I0704 02:43:05.303810 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:05.366573 1461358 logs.go:123] Gathering logs for kube-proxy [e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8] ...
	I0704 02:43:05.366613 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:05.405377 1461358 logs.go:123] Gathering logs for kindnet [ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f] ...
	I0704 02:43:05.405407 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:05.444053 1461358 logs.go:123] Gathering logs for storage-provisioner [946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a] ...
	I0704 02:43:05.444080 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:05.481109 1461358 logs.go:123] Gathering logs for dmesg ...
	I0704 02:43:05.481135 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0704 02:43:05.499896 1461358 logs.go:123] Gathering logs for etcd [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249] ...
	I0704 02:43:05.499929 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:05.550065 1461358 logs.go:123] Gathering logs for containerd ...
	I0704 02:43:05.550096 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u containerd -n 400"
	I0704 02:43:05.609341 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:05.609373 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	W0704 02:43:05.609437 1461358 out.go:239] X Problems detected in kubelet:
	W0704 02:43:05.609448 1461358 out.go:239]   Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.609464 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.609474 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:05.609489 1461358 out.go:239]   Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:05.609497 1461358 out.go:239]   Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:05.609503 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:05.609509 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:43:07.707590 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:09.707793 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:12.206963 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:14.207327 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:16.212497 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:15.610562 1461358 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 02:43:15.631593 1461358 api_server.go:72] duration metric: took 5m49.987447376s to wait for apiserver process to appear ...
	I0704 02:43:15.631618 1461358 api_server.go:88] waiting for apiserver healthz status ...
	I0704 02:43:15.631652 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-apiserver Namespaces:[]}
	I0704 02:43:15.631712 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-apiserver
	I0704 02:43:15.680535 1461358 cri.go:89] found id: "60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:15.680560 1461358 cri.go:89] found id: "186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:15.680565 1461358 cri.go:89] found id: ""
	I0704 02:43:15.680572 1461358 logs.go:276] 2 containers: [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1]
	I0704 02:43:15.680629 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.684592 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.688803 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:etcd Namespaces:[]}
	I0704 02:43:15.688888 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=etcd
	I0704 02:43:15.731139 1461358 cri.go:89] found id: "8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:15.731166 1461358 cri.go:89] found id: "1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:15.731180 1461358 cri.go:89] found id: ""
	I0704 02:43:15.731188 1461358 logs.go:276] 2 containers: [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39]
	I0704 02:43:15.731247 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.735872 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.739703 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:coredns Namespaces:[]}
	I0704 02:43:15.739775 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=coredns
	I0704 02:43:15.830022 1461358 cri.go:89] found id: "58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:15.830045 1461358 cri.go:89] found id: "71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:15.830050 1461358 cri.go:89] found id: ""
	I0704 02:43:15.830057 1461358 logs.go:276] 2 containers: [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f]
	I0704 02:43:15.830115 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.834360 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.838317 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-scheduler Namespaces:[]}
	I0704 02:43:15.838392 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-scheduler
	I0704 02:43:15.904704 1461358 cri.go:89] found id: "50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:15.904768 1461358 cri.go:89] found id: "c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:15.904786 1461358 cri.go:89] found id: ""
	I0704 02:43:15.904811 1461358 logs.go:276] 2 containers: [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5]
	I0704 02:43:15.904910 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.910048 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.916437 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-proxy Namespaces:[]}
	I0704 02:43:15.916611 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-proxy
	I0704 02:43:15.983932 1461358 cri.go:89] found id: "c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:15.984007 1461358 cri.go:89] found id: "e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:15.984032 1461358 cri.go:89] found id: ""
	I0704 02:43:15.984055 1461358 logs.go:276] 2 containers: [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8]
	I0704 02:43:15.984147 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.988958 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:15.993805 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kube-controller-manager Namespaces:[]}
	I0704 02:43:15.993928 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kube-controller-manager
	I0704 02:43:16.064713 1461358 cri.go:89] found id: "83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:16.064789 1461358 cri.go:89] found id: "c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:16.064809 1461358 cri.go:89] found id: ""
	I0704 02:43:16.064834 1461358 logs.go:276] 2 containers: [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222]
	I0704 02:43:16.064924 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.069858 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.074563 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kindnet Namespaces:[]}
	I0704 02:43:16.074686 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kindnet
	I0704 02:43:16.134085 1461358 cri.go:89] found id: "e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:16.134158 1461358 cri.go:89] found id: "ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:16.134178 1461358 cri.go:89] found id: ""
	I0704 02:43:16.134201 1461358 logs.go:276] 2 containers: [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f]
	I0704 02:43:16.134291 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.138505 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.142556 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:storage-provisioner Namespaces:[]}
	I0704 02:43:16.142679 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=storage-provisioner
	I0704 02:43:16.206973 1461358 cri.go:89] found id: "a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:16.207048 1461358 cri.go:89] found id: "946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:16.207067 1461358 cri.go:89] found id: ""
	I0704 02:43:16.207093 1461358 logs.go:276] 2 containers: [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a]
	I0704 02:43:16.207180 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.215336 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.219547 1461358 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name:kubernetes-dashboard Namespaces:[]}
	I0704 02:43:16.219673 1461358 ssh_runner.go:195] Run: sudo crictl ps -a --quiet --name=kubernetes-dashboard
	I0704 02:43:16.270945 1461358 cri.go:89] found id: "11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:16.271020 1461358 cri.go:89] found id: ""
	I0704 02:43:16.271058 1461358 logs.go:276] 1 containers: [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c]
	I0704 02:43:16.271135 1461358 ssh_runner.go:195] Run: which crictl
	I0704 02:43:16.275170 1461358 logs.go:123] Gathering logs for kube-apiserver [186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1] ...
	I0704 02:43:16.275276 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1"
	I0704 02:43:16.355677 1461358 logs.go:123] Gathering logs for etcd [1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39] ...
	I0704 02:43:16.355755 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39"
	I0704 02:43:16.415232 1461358 logs.go:123] Gathering logs for coredns [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09] ...
	I0704 02:43:16.415389 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09"
	I0704 02:43:16.481996 1461358 logs.go:123] Gathering logs for kube-scheduler [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c] ...
	I0704 02:43:16.482071 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c"
	I0704 02:43:16.547129 1461358 logs.go:123] Gathering logs for kube-scheduler [c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5] ...
	I0704 02:43:16.547207 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5"
	I0704 02:43:16.606828 1461358 logs.go:123] Gathering logs for kube-proxy [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f] ...
	I0704 02:43:16.606995 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f"
	I0704 02:43:16.673552 1461358 logs.go:123] Gathering logs for kube-controller-manager [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7] ...
	I0704 02:43:16.673619 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7"
	I0704 02:43:16.783832 1461358 logs.go:123] Gathering logs for dmesg ...
	I0704 02:43:16.783909 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo dmesg -PH -L=never --level warn,err,crit,alert,emerg | tail -n 400"
	I0704 02:43:16.817561 1461358 logs.go:123] Gathering logs for kindnet [ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f] ...
	I0704 02:43:16.817602 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f"
	I0704 02:43:16.871781 1461358 logs.go:123] Gathering logs for etcd [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249] ...
	I0704 02:43:16.871853 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249"
	I0704 02:43:16.932739 1461358 logs.go:123] Gathering logs for coredns [71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f] ...
	I0704 02:43:16.932780 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f"
	I0704 02:43:16.990648 1461358 logs.go:123] Gathering logs for kube-proxy [e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8] ...
	I0704 02:43:16.990719 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8"
	I0704 02:43:17.056592 1461358 logs.go:123] Gathering logs for storage-provisioner [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2] ...
	I0704 02:43:17.056668 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2"
	I0704 02:43:17.110694 1461358 logs.go:123] Gathering logs for containerd ...
	I0704 02:43:17.110722 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u containerd -n 400"
	I0704 02:43:17.187412 1461358 logs.go:123] Gathering logs for container status ...
	I0704 02:43:17.187449 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a"
	I0704 02:43:17.245188 1461358 logs.go:123] Gathering logs for describe nodes ...
	I0704 02:43:17.245216 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig"
	I0704 02:43:17.418440 1461358 logs.go:123] Gathering logs for kube-apiserver [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d] ...
	I0704 02:43:17.418477 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d"
	I0704 02:43:17.488189 1461358 logs.go:123] Gathering logs for kube-controller-manager [c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222] ...
	I0704 02:43:17.488225 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222"
	I0704 02:43:17.584036 1461358 logs.go:123] Gathering logs for kindnet [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e] ...
	I0704 02:43:17.584076 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e"
	I0704 02:43:17.661684 1461358 logs.go:123] Gathering logs for kubernetes-dashboard [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c] ...
	I0704 02:43:17.661717 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c"
	I0704 02:43:17.713313 1461358 logs.go:123] Gathering logs for kubelet ...
	I0704 02:43:17.713341 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo journalctl -u kubelet -n 400"
	W0704 02:43:17.778411 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.141075     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.778619 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:43 old-k8s-version-610521 kubelet[660]: E0704 02:37:43.711599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.784363 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.532033     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.784798 1461358 logs.go:138] Found kubelet problem: Jul 04 02:37:58 old-k8s-version-610521 kubelet[660]: E0704 02:37:58.584292     660 reflector.go:138] object-"kubernetes-dashboard"/"kubernetes-dashboard-token-hbf2h": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets "kubernetes-dashboard-token-hbf2h" is forbidden: User "system:node:old-k8s-version-610521" cannot list resource "secrets" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'old-k8s-version-610521' and this object
	W0704 02:43:17.786715 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:05 old-k8s-version-610521 kubelet[660]: E0704 02:38:05.812067     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.787057 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:06 old-k8s-version-610521 kubelet[660]: E0704 02:38:06.816842     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.787387 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:07 old-k8s-version-610521 kubelet[660]: E0704 02:38:07.834643     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 10s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.790934 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:09 old-k8s-version-610521 kubelet[660]: E0704 02:38:09.521695     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.791791 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:14 old-k8s-version-610521 kubelet[660]: E0704 02:38:14.843471     660 pod_workers.go:191] Error syncing pod c0dd39a9-a4fa-4097-8061-f4d356bedb93 ("storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"), skipping: failed to "StartContainer" for "storage-provisioner" with CrashLoopBackOff: "back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(c0dd39a9-a4fa-4097-8061-f4d356bedb93)"
	W0704 02:43:17.792387 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:19 old-k8s-version-610521 kubelet[660]: E0704 02:38:19.869765     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.794882 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:21 old-k8s-version-610521 kubelet[660]: E0704 02:38:21.530110     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.795684 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:26 old-k8s-version-610521 kubelet[660]: E0704 02:38:26.907175     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.795871 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:35 old-k8s-version-610521 kubelet[660]: E0704 02:38:35.521777     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.796248 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:39 old-k8s-version-610521 kubelet[660]: E0704 02:38:39.521443     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.796439 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:50 old-k8s-version-610521 kubelet[660]: E0704 02:38:50.534167     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.797043 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:53 old-k8s-version-610521 kubelet[660]: E0704 02:38:53.957826     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.797375 1461358 logs.go:138] Found kubelet problem: Jul 04 02:38:56 old-k8s-version-610521 kubelet[660]: E0704 02:38:56.906952     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.797563 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:01 old-k8s-version-610521 kubelet[660]: E0704 02:39:01.521480     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.801144 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:09 old-k8s-version-610521 kubelet[660]: E0704 02:39:09.529008     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.804640 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:13 old-k8s-version-610521 kubelet[660]: E0704 02:39:13.533305     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.804986 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:22 old-k8s-version-610521 kubelet[660]: E0704 02:39:22.524726     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.805175 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:25 old-k8s-version-610521 kubelet[660]: E0704 02:39:25.521615     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.805764 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:37 old-k8s-version-610521 kubelet[660]: E0704 02:39:37.096421     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.805950 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:38 old-k8s-version-610521 kubelet[660]: E0704 02:39:38.521726     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.806299 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:46 old-k8s-version-610521 kubelet[660]: E0704 02:39:46.906948     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.806486 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:53 old-k8s-version-610521 kubelet[660]: E0704 02:39:53.521738     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.806834 1461358 logs.go:138] Found kubelet problem: Jul 04 02:39:58 old-k8s-version-610521 kubelet[660]: E0704 02:39:58.521030     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.807024 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:06 old-k8s-version-610521 kubelet[660]: E0704 02:40:06.525857     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.807356 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:12 old-k8s-version-610521 kubelet[660]: E0704 02:40:12.520985     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.807553 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:19 old-k8s-version-610521 kubelet[660]: E0704 02:40:19.521316     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.807894 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:23 old-k8s-version-610521 kubelet[660]: E0704 02:40:23.521554     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.808081 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:32 old-k8s-version-610521 kubelet[660]: E0704 02:40:32.521292     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.808410 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:38 old-k8s-version-610521 kubelet[660]: E0704 02:40:38.521059     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.810894 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:44 old-k8s-version-610521 kubelet[660]: E0704 02:40:44.529636     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	W0704 02:43:17.811227 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:53 old-k8s-version-610521 kubelet[660]: E0704 02:40:53.523653     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 1m20s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.811415 1461358 logs.go:138] Found kubelet problem: Jul 04 02:40:57 old-k8s-version-610521 kubelet[660]: E0704 02:40:57.521996     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.812929 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:07 old-k8s-version-610521 kubelet[660]: E0704 02:41:07.423946     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.813137 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:09 old-k8s-version-610521 kubelet[660]: E0704 02:41:09.521565     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.813470 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:16 old-k8s-version-610521 kubelet[660]: E0704 02:41:16.907385     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.813671 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:21 old-k8s-version-610521 kubelet[660]: E0704 02:41:21.527466     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814011 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:31 old-k8s-version-610521 kubelet[660]: E0704 02:41:31.524981     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.814198 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:32 old-k8s-version-610521 kubelet[660]: E0704 02:41:32.521592     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814530 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.521996     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.814716 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:44 old-k8s-version-610521 kubelet[660]: E0704 02:41:44.522125     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.814908 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.521344     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.815237 1461358 logs.go:138] Found kubelet problem: Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.522153     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.815565 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.521551     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.815765 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.522234     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.816099 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521701     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.816335 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521892     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.816533 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:33 old-k8s-version-610521 kubelet[660]: E0704 02:42:33.521599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.816863 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.817182 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.817381 1461358 logs.go:138] Found kubelet problem: Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.817566 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.817907 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.818093 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:14 old-k8s-version-610521 kubelet[660]: E0704 02:43:14.521315     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.818422 1461358 logs.go:138] Found kubelet problem: Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: E0704 02:43:16.521001     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:17.818434 1461358 logs.go:123] Gathering logs for storage-provisioner [946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a] ...
	I0704 02:43:17.818451 1461358 ssh_runner.go:195] Run: /bin/bash -c "sudo /usr/bin/crictl logs --tail 400 946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a"
	I0704 02:43:17.868279 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:17.868304 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	W0704 02:43:17.868349 1461358 out.go:239] X Problems detected in kubelet:
	W0704 02:43:17.868360 1461358 out.go:239]   Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.868371 1461358 out.go:239]   Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.868378 1461358 out.go:239]   Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	W0704 02:43:17.868384 1461358 out.go:239]   Jul 04 02:43:14 old-k8s-version-610521 kubelet[660]: E0704 02:43:14.521315     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	W0704 02:43:17.868394 1461358 out.go:239]   Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: E0704 02:43:16.521001     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	I0704 02:43:17.868415 1461358 out.go:304] Setting ErrFile to fd 2...
	I0704 02:43:17.868420 1461358 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:43:18.709537 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:21.206248 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:23.209211 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:25.709967 1470916 pod_ready.go:102] pod "metrics-server-569cc877fc-97mch" in "kube-system" namespace has status "Ready":"False"
	I0704 02:43:27.869804 1461358 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0704 02:43:27.879007 1461358 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0704 02:43:27.880655 1461358 out.go:177] 
	W0704 02:43:27.882680 1461358 out.go:239] X Exiting due to K8S_UNHEALTHY_CONTROL_PLANE: wait 6m0s for node: wait for healthy API server: controlPlane never updated to v1.20.0
	W0704 02:43:27.882719 1461358 out.go:239] * Suggestion: Control Plane could not update, try minikube delete --all --purge
	W0704 02:43:27.882761 1461358 out.go:239] * Related issue: https://github.com/kubernetes/minikube/issues/11417
	W0704 02:43:27.882767 1461358 out.go:239] * 
	W0704 02:43:27.883743 1461358 out.go:239] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0704 02:43:27.886072 1461358 out.go:177] 
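
The api_server.go lines above poll the apiserver's /healthz endpoint until it answers 200 ("ok"). As a minimal, hypothetical sketch (an editor's illustration, not minikube's actual code), assuming the endpoint at https://192.168.76.2:8443/healthz from the log and skipping certificate verification for brevity (minikube itself authenticates with the cluster's client certificates), the probe amounts to:

// Hedged sketch: poll an apiserver /healthz endpoint the way the
// api_server.go:253 lines above do. URL and InsecureSkipVerify are
// assumptions for the illustration only.
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

func checkHealthz(url string) error {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			// Assumption: skip cert verification for the sketch only.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("healthz returned %d: %s", resp.StatusCode, body)
	}
	fmt.Printf("%s returned 200: %s\n", url, body) // mirrors the log above
	return nil
}

func main() {
	if err := checkHealthz("https://192.168.76.2:8443/healthz"); err != nil {
		fmt.Println("healthz check failed:", err)
	}
}

Note that the healthz probe succeeding while the run still exits with K8S_UNHEALTHY_CONTROL_PLANE is the point of the failure above: the endpoint is up, but the control plane never reported the expected v1.20.0 version.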
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                        ATTEMPT             POD ID              POD
	0dce99ee6be65       523cad1a4df73       2 minutes ago       Exited              dashboard-metrics-scraper   5                   05fa7562145cc       dashboard-metrics-scraper-8d5bb5db8-rxdl6
	a4760ff354caf       ba04bb24b9575       5 minutes ago       Running             storage-provisioner         2                   8f16543d07609       storage-provisioner
	11d610a49bcd9       20b332c9a70d8       5 minutes ago       Running             kubernetes-dashboard        0                   ce03ad4f30e91       kubernetes-dashboard-cd95d586-xnqcr
	a8e3d171aa157       1611cd07b61d5       5 minutes ago       Running             busybox                     1                   b96715275719b       busybox
	946bdba0449bb       ba04bb24b9575       5 minutes ago       Exited              storage-provisioner         1                   8f16543d07609       storage-provisioner
	58c28bfb15459       db91994f4ee8f       5 minutes ago       Running             coredns                     1                   726dc9563624f       coredns-74ff55c5b-r7qv7
	e8761557ad0a9       89d73d416b992       5 minutes ago       Running             kindnet-cni                 1                   d832e0df70e6c       kindnet-4fp4t
	c87928a87c1d3       25a5233254979       5 minutes ago       Running             kube-proxy                  1                   7383567b2be94       kube-proxy-q4c98
	83d7189811f75       1df8a2b116bd1       5 minutes ago       Running             kube-controller-manager     1                   eac3b241131f7       kube-controller-manager-old-k8s-version-610521
	50f1aed2fd69d       e7605f88f17d6       5 minutes ago       Running             kube-scheduler              1                   68595857752ac       kube-scheduler-old-k8s-version-610521
	60850fe005f0a       2c08bbbc02d3a       5 minutes ago       Running             kube-apiserver              1                   b44b33fa02778       kube-apiserver-old-k8s-version-610521
	8a8399e4a7b68       05b738aa1bc63       5 minutes ago       Running             etcd                        1                   008eb6d5b87bc       etcd-old-k8s-version-610521
	a136b72adddc0       1611cd07b61d5       6 minutes ago       Exited              busybox                     0                   ec7514c4bf122       busybox
	71e03ac7da717       db91994f4ee8f       8 minutes ago       Exited              coredns                     0                   a40c9ded32470       coredns-74ff55c5b-r7qv7
	ff0d8a658cd42       89d73d416b992       8 minutes ago       Exited              kindnet-cni                 0                   4144b7fbc59c8       kindnet-4fp4t
	e36274ec135b8       25a5233254979       8 minutes ago       Exited              kube-proxy                  0                   2e16f6e111ac1       kube-proxy-q4c98
	1332e9cb16fba       05b738aa1bc63       8 minutes ago       Exited              etcd                        0                   58c433105f0bd       etcd-old-k8s-version-610521
	186d8c36c000c       2c08bbbc02d3a       8 minutes ago       Exited              kube-apiserver              0                   483d28cce47ae       kube-apiserver-old-k8s-version-610521
	c1881aed94a4b       1df8a2b116bd1       8 minutes ago       Exited              kube-controller-manager     0                   e98153960e839       kube-controller-manager-old-k8s-version-610521
	c77b76d8ee8e9       e7605f88f17d6       8 minutes ago       Exited              kube-scheduler              0                   2d1c1d90b8ea7       kube-scheduler-old-k8s-version-610521
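
The dashboard-metrics-scraper row above is at restart attempt 5 and Exited, i.e. crash-looping. Kubelet's restart back-off doubles from 10s up to a 5m cap, which is why the kubelet warnings earlier in this report say "back-off 2m40s restarting failed container" at this attempt. A minimal sketch of that schedule (an editor's illustration of the documented back-off behavior, not kubelet code):

// Hedged sketch: kubelet's CrashLoopBackOff delay doubles from 10s,
// capped at 5m; attempt 5 lands on 2m40s, matching the log above.
package main

import (
	"fmt"
	"time"
)

func main() {
	backoff := 10 * time.Second
	const maxBackoff = 5 * time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: back-off %v\n", attempt, backoff)
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
}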
	
	
	==> containerd <==
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.542744790Z" level=info msg="CreateContainer within sandbox \"05fa7562145cc451208b8c734015d1a35e5fd5ce32b989fd8adaa204dada6b19\" for name:\"dashboard-metrics-scraper\"  attempt:4 returns container id \"d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201\""
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.543957866Z" level=info msg="StartContainer for \"d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201\""
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.621803114Z" level=info msg="StartContainer for \"d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201\" returns successfully"
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.661806272Z" level=info msg="shim disconnected" id=d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201 namespace=k8s.io
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.661867186Z" level=warning msg="cleaning up after shim disconnected" id=d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201 namespace=k8s.io
	Jul 04 02:39:36 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:36.661879148Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Jul 04 02:39:37 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:37.099699057Z" level=info msg="RemoveContainer for \"2002ce2abb9bb01eea72b24839edd7c7aad060d1903363102aabf21e7ec1f336\""
	Jul 04 02:39:37 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:39:37.106781414Z" level=info msg="RemoveContainer for \"2002ce2abb9bb01eea72b24839edd7c7aad060d1903363102aabf21e7ec1f336\" returns successfully"
	Jul 04 02:40:44 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:40:44.522114112Z" level=info msg="PullImage \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:40:44 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:40:44.527674920Z" level=info msg="trying next host" error="failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host" host=fake.domain
	Jul 04 02:40:44 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:40:44.529071016Z" level=error msg="PullImage \"fake.domain/registry.k8s.io/echoserver:1.4\" failed" error="failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	Jul 04 02:40:44 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:40:44.529233769Z" level=info msg="stop pulling image fake.domain/registry.k8s.io/echoserver:1.4: active requests=0, bytes read=0"
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.524719466Z" level=info msg="CreateContainer within sandbox \"05fa7562145cc451208b8c734015d1a35e5fd5ce32b989fd8adaa204dada6b19\" for container name:\"dashboard-metrics-scraper\"  attempt:5"
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.558119362Z" level=info msg="CreateContainer within sandbox \"05fa7562145cc451208b8c734015d1a35e5fd5ce32b989fd8adaa204dada6b19\" for name:\"dashboard-metrics-scraper\"  attempt:5 returns container id \"0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1\""
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.558709593Z" level=info msg="StartContainer for \"0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1\""
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.718254170Z" level=info msg="StartContainer for \"0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1\" returns successfully"
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.782036144Z" level=info msg="shim disconnected" id=0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1 namespace=k8s.io
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.782375279Z" level=warning msg="cleaning up after shim disconnected" id=0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1 namespace=k8s.io
	Jul 04 02:41:06 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:06.782474403Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Jul 04 02:41:07 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:07.440422423Z" level=info msg="RemoveContainer for \"d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201\""
	Jul 04 02:41:07 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:41:07.447009112Z" level=info msg="RemoveContainer for \"d506437e81c289d19c7ade2e5ac5ea8ddd1efd382ba0f73024892d150cbb4201\" returns successfully"
	Jul 04 02:43:26 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:43:26.522226840Z" level=info msg="PullImage \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:43:26 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:43:26.526905888Z" level=info msg="trying next host" error="failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host" host=fake.domain
	Jul 04 02:43:26 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:43:26.528492626Z" level=error msg="PullImage \"fake.domain/registry.k8s.io/echoserver:1.4\" failed" error="failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	Jul 04 02:43:26 old-k8s-version-610521 containerd[571]: time="2024-07-04T02:43:26.528549084Z" level=info msg="stop pulling image fake.domain/registry.k8s.io/echoserver:1.4: active requests=0, bytes read=0"
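
The pull failures above are intentional: the metrics-server test rewrites the image reference to the unresolvable host fake.domain, so containerd's registry resolver fails at the DNS step before any registry traffic happens. A minimal sketch reproducing the same "no such host" error (an editor's illustration; the host name is taken from the log above):

// Hedged sketch: resolving the deliberately bogus registry host fails
// with "no such host", the root cause of the ImagePullBackOff above.
package main

import (
	"fmt"
	"net"
)

func main() {
	addrs, err := net.LookupHost("fake.domain")
	if err != nil {
		// Expected: "lookup fake.domain ... no such host",
		// matching containerd's "trying next host" error above.
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println("resolved (unexpected):", addrs)
}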
	
	
	==> coredns [58c28bfb15459945dd5649b964ec42a50cc8cbdc4feda5222594cdf64bf0bc09] <==
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	.:53
	[INFO] plugin/reload: Running configuration MD5 = b494d968e357ba1b925cee838fbd78ed
	CoreDNS-1.7.0
	linux/arm64, go1.14.4, f59c03d
	[INFO] 127.0.0.1:55721 - 43490 "HINFO IN 5982100861046839276.3908761463056302231. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.020666161s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	I0704 02:38:13.696627       1 trace.go:116] Trace[2019727887]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125 (started: 2024-07-04 02:37:43.695222354 +0000 UTC m=+0.097403862) (total time: 30.001294876s):
	Trace[2019727887]: [30.001294876s] [30.001294876s] END
	E0704 02:38:13.696657       1 reflector.go:178] pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125: Failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0704 02:38:13.697333       1 trace.go:116] Trace[939984059]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125 (started: 2024-07-04 02:37:43.696887547 +0000 UTC m=+0.099069055) (total time: 30.000426054s):
	Trace[939984059]: [30.000426054s] [30.000426054s] END
	E0704 02:38:13.697347       1 reflector.go:178] pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125: Failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0704 02:38:13.697893       1 trace.go:116] Trace[1474941318]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125 (started: 2024-07-04 02:37:43.697429418 +0000 UTC m=+0.099610918) (total time: 30.000447944s):
	Trace[1474941318]: [30.000447944s] [30.000447944s] END
	E0704 02:38:13.697905       1 reflector.go:178] pkg/mod/k8s.io/client-go@v0.18.3/tools/cache/reflector.go:125: Failed to list *v1.Endpoints: Get "https://10.96.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
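
The 30-second ListAndWatch traces above all end in dial timeouts to 10.96.0.1:443, the in-cluster kubernetes service VIP, which typically means CoreDNS started before service routing to the apiserver was programmed. A minimal sketch showing the same failure shape (an editor's illustration; address and timeout are taken from, or chosen to approximate, the log above):

// Hedged sketch: a bare TCP dial to the service VIP with a deadline
// fails with "i/o timeout" while the route is not yet in place.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	conn, err := net.DialTimeout("tcp", "10.96.0.1:443", 5*time.Second)
	if err != nil {
		fmt.Println("dial failed:", err) // e.g. "i/o timeout", as in the log
		return
	}
	conn.Close()
	fmt.Println("dial succeeded")
}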
	
	
	==> coredns [71e03ac7da71796f0a3c61c8049ad7bacbefbec7a5934b306ad0b70906bb323f] <==
	.:53
	[INFO] plugin/reload: Running configuration MD5 = b494d968e357ba1b925cee838fbd78ed
	CoreDNS-1.7.0
	linux/arm64, go1.14.4, f59c03d
	[INFO] 127.0.0.1:44707 - 6167 "HINFO IN 7557065838368932784.5582643477403807696. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.022185845s
	
	
	==> describe nodes <==
	Name:               old-k8s-version-610521
	Roles:              control-plane,master
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=old-k8s-version-610521
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=b003e6195fd8aae2e8757a7316e2960f465339c8
	                    minikube.k8s.io/name=old-k8s-version-610521
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_07_04T02_34_48_0700
	                    minikube.k8s.io/version=v1.33.1
	                    node-role.kubernetes.io/control-plane=
	                    node-role.kubernetes.io/master=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: /run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Thu, 04 Jul 2024 02:34:43 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  old-k8s-version-610521
	  AcquireTime:     <unset>
	  RenewTime:       Thu, 04 Jul 2024 02:43:25 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Thu, 04 Jul 2024 02:38:42 +0000   Thu, 04 Jul 2024 02:34:34 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Thu, 04 Jul 2024 02:38:42 +0000   Thu, 04 Jul 2024 02:34:34 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Thu, 04 Jul 2024 02:38:42 +0000   Thu, 04 Jul 2024 02:34:34 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Thu, 04 Jul 2024 02:38:42 +0000   Thu, 04 Jul 2024 02:35:03 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.76.2
	  Hostname:    old-k8s-version-610521
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022360Ki
	  pods:               110
	System Info:
	  Machine ID:                 8150af567b314addabbc3f296dfaa4cd
	  System UUID:                91b0bbc2-4d56-460f-a820-d502fb01bf80
	  Boot ID:                    8f650b57-d36f-4952-bd7f-5577bab5f375
	  Kernel Version:             5.15.0-1064-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.18
	  Kubelet Version:            v1.20.0
	  Kube-Proxy Version:         v1.20.0
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (12 in total)
	  Namespace                   Name                                              CPU Requests  CPU Limits  Memory Requests  Memory Limits  AGE
	  ---------                   ----                                              ------------  ----------  ---------------  -------------  ---
	  default                     busybox                                           0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m32s
	  kube-system                 coredns-74ff55c5b-r7qv7                           100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     8m26s
	  kube-system                 etcd-old-k8s-version-610521                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         8m33s
	  kube-system                 kindnet-4fp4t                                     100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      8m26s
	  kube-system                 kube-apiserver-old-k8s-version-610521             250m (12%)    0 (0%)      0 (0%)           0 (0%)         8m33s
	  kube-system                 kube-controller-manager-old-k8s-version-610521    200m (10%)    0 (0%)      0 (0%)           0 (0%)         8m33s
	  kube-system                 kube-proxy-q4c98                                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m26s
	  kube-system                 kube-scheduler-old-k8s-version-610521             100m (5%)     0 (0%)      0 (0%)           0 (0%)         8m33s
	  kube-system                 metrics-server-9975d5f86-hvdg7                    100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         6m23s
	  kube-system                 storage-provisioner                               0 (0%)        0 (0%)      0 (0%)           0 (0%)         8m24s
	  kubernetes-dashboard        dashboard-metrics-scraper-8d5bb5db8-rxdl6         0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m31s
	  kubernetes-dashboard        kubernetes-dashboard-cd95d586-xnqcr               0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m31s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             420Mi (5%)  220Mi (2%)
	  ephemeral-storage  100Mi (0%)  0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From        Message
	  ----    ------                   ----                   ----        -------
	  Normal  NodeHasSufficientMemory  8m58s (x6 over 8m58s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    8m58s (x6 over 8m58s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     8m58s (x6 over 8m58s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientPID
	  Normal  Starting                 8m33s                  kubelet     Starting kubelet.
	  Normal  NodeHasSufficientMemory  8m33s                  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    8m33s                  kubelet     Node old-k8s-version-610521 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     8m33s                  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  8m33s                  kubelet     Updated Node Allocatable limit across pods
	  Normal  NodeReady                8m26s                  kubelet     Node old-k8s-version-610521 status is now: NodeReady
	  Normal  Starting                 8m25s                  kube-proxy  Starting kube-proxy.
	  Normal  Starting                 5m56s                  kubelet     Starting kubelet.
	  Normal  NodeHasSufficientMemory  5m56s (x8 over 5m56s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    5m56s (x8 over 5m56s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     5m56s (x7 over 5m56s)  kubelet     Node old-k8s-version-610521 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  5m56s                  kubelet     Updated Node Allocatable limit across pods
	  Normal  Starting                 5m45s                  kube-proxy  Starting kube-proxy.
	
	
	==> dmesg <==
	[  +0.001047] FS-Cache: O-key=[8] '3373ed0000000000'
	[  +0.000710] FS-Cache: N-cookie c=00000042 [p=00000039 fl=2 nc=0 na=1]
	[  +0.000925] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000f6098f89
	[  +0.001031] FS-Cache: N-key=[8] '3373ed0000000000'
	[  +0.003157] FS-Cache: Duplicate cookie detected
	[  +0.000648] FS-Cache: O-cookie c=0000003c [p=00000039 fl=226 nc=0 na=1]
	[  +0.000906] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=000000007ffd0c87
	[  +0.001010] FS-Cache: O-key=[8] '3373ed0000000000'
	[  +0.000673] FS-Cache: N-cookie c=00000043 [p=00000039 fl=2 nc=0 na=1]
	[  +0.000875] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000b09744a6
	[  +0.000987] FS-Cache: N-key=[8] '3373ed0000000000'
	[  +2.090097] FS-Cache: Duplicate cookie detected
	[  +0.000908] FS-Cache: O-cookie c=0000003a [p=00000039 fl=226 nc=0 na=1]
	[  +0.001012] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=0000000007b93ca1
	[  +0.001103] FS-Cache: O-key=[8] '3273ed0000000000'
	[  +0.000725] FS-Cache: N-cookie c=00000045 [p=00000039 fl=2 nc=0 na=1]
	[  +0.001087] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=00000000f6098f89
	[  +0.001175] FS-Cache: N-key=[8] '3273ed0000000000'
	[  +0.393224] FS-Cache: Duplicate cookie detected
	[  +0.000691] FS-Cache: O-cookie c=0000003f [p=00000039 fl=226 nc=0 na=1]
	[  +0.000960] FS-Cache: O-cookie d=00000000915207cd{9p.inode} n=00000000c1ecc0ff
	[  +0.001038] FS-Cache: O-key=[8] '3873ed0000000000'
	[  +0.000724] FS-Cache: N-cookie c=00000046 [p=00000039 fl=2 nc=0 na=1]
	[  +0.000927] FS-Cache: N-cookie d=00000000915207cd{9p.inode} n=0000000028b8456e
	[  +0.001028] FS-Cache: N-key=[8] '3873ed0000000000'
	
	
	==> etcd [1332e9cb16fba0754aab5a90d7b820adf8d9a1d1c927c7ca3c4030879eae0e39] <==
	raft2024/07/04 02:34:33 INFO: ea7e25599daad906 became candidate at term 2
	raft2024/07/04 02:34:33 INFO: ea7e25599daad906 received MsgVoteResp from ea7e25599daad906 at term 2
	raft2024/07/04 02:34:33 INFO: ea7e25599daad906 became leader at term 2
	raft2024/07/04 02:34:33 INFO: raft.node: ea7e25599daad906 elected leader ea7e25599daad906 at term 2
	2024-07-04 02:34:33.126466 I | etcdserver: setting up the initial cluster version to 3.4
	2024-07-04 02:34:33.128582 N | etcdserver/membership: set the initial cluster version to 3.4
	2024-07-04 02:34:33.128970 I | etcdserver/api: enabled capabilities for version 3.4
	2024-07-04 02:34:33.129159 I | etcdserver: published {Name:old-k8s-version-610521 ClientURLs:[https://192.168.76.2:2379]} to cluster 6f20f2c4b2fb5f8a
	2024-07-04 02:34:33.129547 I | embed: ready to serve client requests
	2024-07-04 02:34:33.136070 I | embed: serving client requests on 127.0.0.1:2379
	2024-07-04 02:34:33.136338 I | embed: ready to serve client requests
	2024-07-04 02:34:33.143427 I | embed: serving client requests on 192.168.76.2:2379
	2024-07-04 02:35:05.837744 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:08.455266 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:18.455152 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:28.455309 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:38.455303 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:48.455306 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:35:58.455108 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:08.455146 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:18.455242 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:28.455238 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:38.455148 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:48.455247 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:36:58.455402 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	
	
	==> etcd [8a8399e4a7b68b69943912a3d46d948be98f305a699513a7a8a124736077d249] <==
	2024-07-04 02:39:23.586423 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:39:33.586395 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:39:43.586286 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:39:53.586415 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:03.586277 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:13.586561 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:23.586894 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:33.586266 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:43.586439 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:40:53.586436 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:03.586487 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:13.591657 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:23.586348 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:33.587200 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:43.586437 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:41:53.586475 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:03.586361 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:13.586412 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:23.586350 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:33.586823 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:43.586273 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:42:53.586481 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:43:03.586443 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:43:13.586446 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2024-07-04 02:43:23.586467 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	
	
	==> kernel <==
	 02:43:29 up  8:25,  0 users,  load average: 1.11, 1.78, 2.49
	Linux old-k8s-version-610521 5.15.0-1064-aws #70~20.04.1-Ubuntu SMP Thu Jun 27 14:52:48 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [e8761557ad0a992859e02050fbcb50c867e6456f15d43a1922860d58c918475e] <==
	I0704 02:41:24.195190       1 main.go:227] handling current node
	I0704 02:41:34.200566       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:41:34.200596       1 main.go:227] handling current node
	I0704 02:41:44.206586       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:41:44.206720       1 main.go:227] handling current node
	I0704 02:41:54.218422       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:41:54.218455       1 main.go:227] handling current node
	I0704 02:42:04.233800       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:04.233832       1 main.go:227] handling current node
	I0704 02:42:14.334742       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:14.334777       1 main.go:227] handling current node
	I0704 02:42:24.452789       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:24.452943       1 main.go:227] handling current node
	I0704 02:42:34.584213       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:34.584411       1 main.go:227] handling current node
	I0704 02:42:44.680555       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:44.680647       1 main.go:227] handling current node
	I0704 02:42:54.913309       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:42:54.913342       1 main.go:227] handling current node
	I0704 02:43:04.978712       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:43:04.978742       1 main.go:227] handling current node
	I0704 02:43:15.115373       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:43:15.115553       1 main.go:227] handling current node
	I0704 02:43:25.196183       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:43:25.196400       1 main.go:227] handling current node
	
	
	==> kindnet [ff0d8a658cd4232702700190007d62520a4a45872a028529b9ed36fe6dc3549f] <==
	I0704 02:35:06.040541       1 main.go:150] noMask IPv4 subnets: [10.244.0.0/16]
	I0704 02:35:06.344416       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:06.344450       1 main.go:227] handling current node
	I0704 02:35:16.372661       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:16.372886       1 main.go:227] handling current node
	I0704 02:35:26.384391       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:26.384422       1 main.go:227] handling current node
	I0704 02:35:36.402170       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:36.402201       1 main.go:227] handling current node
	I0704 02:35:46.413469       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:46.413500       1 main.go:227] handling current node
	I0704 02:35:56.433683       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:35:56.433947       1 main.go:227] handling current node
	I0704 02:36:06.473317       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:06.473414       1 main.go:227] handling current node
	I0704 02:36:16.499849       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:16.499877       1 main.go:227] handling current node
	I0704 02:36:26.532634       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:26.532913       1 main.go:227] handling current node
	I0704 02:36:36.551950       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:36.551991       1 main.go:227] handling current node
	I0704 02:36:46.579403       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:46.579553       1 main.go:227] handling current node
	I0704 02:36:56.598094       1 main.go:223] Handling node with IPs: map[192.168.76.2:{}]
	I0704 02:36:56.598123       1 main.go:227] handling current node
	
	
	==> kube-apiserver [186d8c36c000cb07d64c9a71d61cc5b1cf836660ec112c7bc99feeea6eb45bd1] <==
	I0704 02:34:44.007955       1 controller.go:132] OpenAPI AggregationController: action for item : Nothing (removed from the queue).
	I0704 02:34:44.007992       1 controller.go:132] OpenAPI AggregationController: action for item k8s_internal_local_delegation_chain_0000000000: Nothing (removed from the queue).
	I0704 02:34:44.044170       1 storage_scheduling.go:132] created PriorityClass system-node-critical with value 2000001000
	I0704 02:34:44.084457       1 storage_scheduling.go:132] created PriorityClass system-cluster-critical with value 2000000000
	I0704 02:34:44.084488       1 storage_scheduling.go:148] all system priority classes are created successfully or already exist.
	I0704 02:34:45.506185       1 controller.go:606] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0704 02:34:45.564485       1 controller.go:606] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	W0704 02:34:45.688635       1 lease.go:233] Resetting endpoints for master service "kubernetes" to [192.168.76.2]
	I0704 02:34:45.690288       1 controller.go:606] quota admission added evaluator for: endpoints
	I0704 02:34:45.695727       1 controller.go:606] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0704 02:34:46.893142       1 controller.go:606] quota admission added evaluator for: serviceaccounts
	I0704 02:34:47.685894       1 controller.go:606] quota admission added evaluator for: deployments.apps
	I0704 02:34:47.769958       1 controller.go:606] quota admission added evaluator for: daemonsets.apps
	I0704 02:34:56.366400       1 controller.go:606] quota admission added evaluator for: leases.coordination.k8s.io
	I0704 02:35:03.123230       1 controller.go:606] quota admission added evaluator for: replicasets.apps
	I0704 02:35:03.460691       1 controller.go:606] quota admission added evaluator for: controllerrevisions.apps
	I0704 02:35:14.804409       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:35:14.804545       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:35:14.804590       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	I0704 02:35:58.501770       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:35:58.502006       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:35:58.502022       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	I0704 02:36:34.938445       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:36:34.938514       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:36:34.938524       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	
	
	==> kube-apiserver [60850fe005f0ab5cfc576b645d25d0daaadbd790e2fc616900504e76c1a1950d] <==
	I0704 02:40:06.904970       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:40:06.904980       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	I0704 02:40:43.317747       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:40:43.317787       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:40:43.317796       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	W0704 02:40:45.046108       1 handler_proxy.go:102] no RequestInfo found in the context
	E0704 02:40:45.046220       1 controller.go:116] loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
	, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	I0704 02:40:45.046230       1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0704 02:41:14.455681       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:41:14.455724       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:41:14.455745       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	I0704 02:41:52.239729       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:41:52.239830       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:41:52.239858       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	I0704 02:42:29.222863       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:42:29.222907       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:42:29.222916       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	W0704 02:42:42.603583       1 handler_proxy.go:102] no RequestInfo found in the context
	E0704 02:42:42.603850       1 controller.go:116] loading OpenAPI spec for "v1beta1.metrics.k8s.io" failed with: failed to retrieve openAPI spec, http error: ResponseCode: 503, Body: service unavailable
	, Header: map[Content-Type:[text/plain; charset=utf-8] X-Content-Type-Options:[nosniff]]
	I0704 02:42:42.603872       1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Rate Limited Requeue.
	I0704 02:43:11.115335       1 client.go:360] parsed scheme: "passthrough"
	I0704 02:43:11.115389       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0704 02:43:11.115398       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	
	
	==> kube-controller-manager [83d7189811f75b1bd3a98867c17d6570b9bfcd834d612431e0cb4e1fab3f4cb7] <==
	W0704 02:39:06.414496       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:39:29.974626       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:39:38.064935       1 request.go:655] Throttling request took 1.047608597s, request: GET:https://192.168.76.2:8443/apis/extensions/v1beta1?timeout=32s
	W0704 02:39:38.916638       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:40:00.491664       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:40:10.566952       1 request.go:655] Throttling request took 1.048163227s, request: GET:https://192.168.76.2:8443/apis/networking.k8s.io/v1?timeout=32s
	W0704 02:40:11.418458       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:40:30.993700       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:40:43.068844       1 request.go:655] Throttling request took 1.047916396s, request: GET:https://192.168.76.2:8443/apis/extensions/v1beta1?timeout=32s
	W0704 02:40:43.920454       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:41:01.496428       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:41:15.524237       1 request.go:655] Throttling request took 1.001740248s, request: GET:https://192.168.76.2:8443/apis/networking.k8s.io/v1beta1?timeout=32s
	W0704 02:41:16.422416       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:41:31.998706       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:41:48.073063       1 request.go:655] Throttling request took 1.048260712s, request: GET:https://192.168.76.2:8443/apis/events.k8s.io/v1beta1?timeout=32s
	W0704 02:41:48.924505       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:42:02.500706       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:42:20.574895       1 request.go:655] Throttling request took 1.046825443s, request: GET:https://192.168.76.2:8443/apis/policy/v1beta1?timeout=32s
	W0704 02:42:21.426317       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:42:33.003111       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:42:53.076815       1 request.go:655] Throttling request took 1.047759694s, request: GET:https://192.168.76.2:8443/apis/extensions/v1beta1?timeout=32s
	W0704 02:42:53.928389       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
	E0704 02:43:03.505246       1 resource_quota_controller.go:409] unable to retrieve the complete list of server APIs: metrics.k8s.io/v1beta1: the server is currently unable to handle the request
	I0704 02:43:25.578860       1 request.go:655] Throttling request took 1.048220568s, request: GET:https://192.168.76.2:8443/apis/networking.k8s.io/v1?timeout=32s
	W0704 02:43:26.430472       1 garbagecollector.go:703] failed to discover some groups: map[metrics.k8s.io/v1beta1:the server is currently unable to handle the request]
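
The resource-quota and garbage-collector errors above all trace back to API discovery of the metrics.k8s.io/v1beta1 group, whose backing metrics-server pod never became ready (the ImagePullBackOff earlier in this report). A minimal client-go sketch that reproduces the same discovery error (an editor's illustration, assuming a reachable cluster and the hypothetical kubeconfig path below):

// Hedged sketch: ask discovery for the metrics.k8s.io/v1beta1 group;
// while metrics-server is down this fails with
// "the server is currently unable to handle the request".
package main

import (
	"fmt"

	"k8s.io/client-go/discovery"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig path for the illustration.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config")
	if err != nil {
		panic(err)
	}
	dc, err := discovery.NewDiscoveryClientForConfig(cfg)
	if err != nil {
		panic(err)
	}
	if _, err := dc.ServerResourcesForGroupVersion("metrics.k8s.io/v1beta1"); err != nil {
		fmt.Println("discovery failed:", err)
		return
	}
	fmt.Println("metrics.k8s.io/v1beta1 is being served")
}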
	
	
	==> kube-controller-manager [c1881aed94a4bf9ecfebcec75304d41e97b25974bd8a653d24b470be7bbaa222] <==
	I0704 02:35:03.211057       1 event.go:291] "Event occurred" object="old-k8s-version-610521" kind="Node" apiVersion="v1" type="Normal" reason="RegisteredNode" message="Node old-k8s-version-610521 event: Registered Node old-k8s-version-610521 in Controller"
	I0704 02:35:03.224331       1 range_allocator.go:373] Set node old-k8s-version-610521 PodCIDR to [10.244.0.0/24]
	I0704 02:35:03.234542       1 event.go:291] "Event occurred" object="kube-system/coredns" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set coredns-74ff55c5b to 2"
	I0704 02:35:03.283409       1 shared_informer.go:247] Caches are synced for stateful set 
	I0704 02:35:03.294530       1 shared_informer.go:247] Caches are synced for resource quota 
	I0704 02:35:03.313961       1 event.go:291] "Event occurred" object="kube-system/kube-apiserver-old-k8s-version-610521" kind="Pod" apiVersion="v1" type="Warning" reason="NodeNotReady" message="Node is not ready"
	E0704 02:35:03.325619       1 clusterroleaggregation_controller.go:181] view failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "view": the object has been modified; please apply your changes to the latest version and try again
	E0704 02:35:03.325980       1 clusterroleaggregation_controller.go:181] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
	I0704 02:35:03.338341       1 event.go:291] "Event occurred" object="kube-system/coredns-74ff55c5b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: coredns-74ff55c5b-mzcfn"
	I0704 02:35:03.382158       1 shared_informer.go:247] Caches are synced for resource quota 
	I0704 02:35:03.386098       1 shared_informer.go:247] Caches are synced for daemon sets 
	I0704 02:35:03.410218       1 event.go:291] "Event occurred" object="kube-system/coredns-74ff55c5b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: coredns-74ff55c5b-r7qv7"
	I0704 02:35:03.505677       1 event.go:291] "Event occurred" object="kube-system/kindnet" kind="DaemonSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: kindnet-4fp4t"
	I0704 02:35:03.546598       1 event.go:291] "Event occurred" object="kube-system/kube-proxy" kind="DaemonSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: kube-proxy-q4c98"
	E0704 02:35:03.772808       1 daemon_controller.go:320] kube-system/kindnet failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kindnet", GenerateName:"", Namespace:"kube-system", SelfLink:"", UID:"d1958912-f4d1-463f-bfa5-0c0d80238f81", ResourceVersion:"289", Generation:1, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63855657288, loc:(*time.Location)(0x632eb80)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"app":"kindnet", "k8s-app":"kindnet", "tier":"node"}, Annotations:map[string]string{"deprecated.daemonset.template.generation":"1", "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{},\"labels\":{\"app\":\"kindnet\",\"k8s-app\":\"kindnet\",\"tier\":\"node\"},\"name\":\"kindnet\",\"namespace\":\"kube-system\"},\"spec\":{\"selector\":{\"matchLabels\":{\"app\":\"k
indnet\"}},\"template\":{\"metadata\":{\"labels\":{\"app\":\"kindnet\",\"k8s-app\":\"kindnet\",\"tier\":\"node\"}},\"spec\":{\"containers\":[{\"env\":[{\"name\":\"HOST_IP\",\"valueFrom\":{\"fieldRef\":{\"fieldPath\":\"status.hostIP\"}}},{\"name\":\"POD_IP\",\"valueFrom\":{\"fieldRef\":{\"fieldPath\":\"status.podIP\"}}},{\"name\":\"POD_SUBNET\",\"value\":\"10.244.0.0/16\"}],\"image\":\"docker.io/kindest/kindnetd:v20240513-cd2ac642\",\"name\":\"kindnet-cni\",\"resources\":{\"limits\":{\"cpu\":\"100m\",\"memory\":\"50Mi\"},\"requests\":{\"cpu\":\"100m\",\"memory\":\"50Mi\"}},\"securityContext\":{\"capabilities\":{\"add\":[\"NET_RAW\",\"NET_ADMIN\"]},\"privileged\":false},\"volumeMounts\":[{\"mountPath\":\"/etc/cni/net.d\",\"name\":\"cni-cfg\"},{\"mountPath\":\"/run/xtables.lock\",\"name\":\"xtables-lock\",\"readOnly\":false},{\"mountPath\":\"/lib/modules\",\"name\":\"lib-modules\",\"readOnly\":true}]}],\"hostNetwork\":true,\"serviceAccountName\":\"kindnet\",\"tolerations\":[{\"effect\":\"NoSchedule\",\"operator\
":\"Exists\"}],\"volumes\":[{\"hostPath\":{\"path\":\"/etc/cni/net.d\",\"type\":\"DirectoryOrCreate\"},\"name\":\"cni-cfg\"},{\"hostPath\":{\"path\":\"/run/xtables.lock\",\"type\":\"FileOrCreate\"},\"name\":\"xtables-lock\"},{\"hostPath\":{\"path\":\"/lib/modules\"},\"name\":\"lib-modules\"}]}}}}\n"}, OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kubectl-client-side-apply", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0x400179c240), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0x400179c260)}}}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0x400179c280), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string
{"app":"kindnet", "k8s-app":"kindnet", "tier":"node"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, Spec:v1.PodSpec{Volumes:[]v1.Volume{v1.Volume{Name:"cni-cfg", VolumeSource:v1.VolumeSource{HostPath:(*v1.HostPathVolumeSource)(0x400179c2a0), EmptyDir:(*v1.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*v1.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*v1.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*v1.GitRepoVolumeSource)(nil), Secret:(*v1.SecretVolumeSource)(nil), NFS:(*v1.NFSVolumeSource)(nil), ISCSI:(*v1.ISCSIVolumeSource)(nil), Glusterfs:(*v1.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*v1.PersistentVolumeClaimVolumeSource)(nil), RBD:(*v1.RBDVolumeSource)(nil), FlexVolume:(*v1.FlexVolumeSource)(nil), Cinder:(*v1.CinderVolumeSource)(nil), CephFS:(*v1.CephFSVolumeSource)(nil), Flocker:(*v1.FlockerVolumeSource)(nil), DownwardAPI:(*v1.DownwardAPIVolumeSource)(nil),
FC:(*v1.FCVolumeSource)(nil), AzureFile:(*v1.AzureFileVolumeSource)(nil), ConfigMap:(*v1.ConfigMapVolumeSource)(nil), VsphereVolume:(*v1.VsphereVirtualDiskVolumeSource)(nil), Quobyte:(*v1.QuobyteVolumeSource)(nil), AzureDisk:(*v1.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*v1.PhotonPersistentDiskVolumeSource)(nil), Projected:(*v1.ProjectedVolumeSource)(nil), PortworxVolume:(*v1.PortworxVolumeSource)(nil), ScaleIO:(*v1.ScaleIOVolumeSource)(nil), StorageOS:(*v1.StorageOSVolumeSource)(nil), CSI:(*v1.CSIVolumeSource)(nil), Ephemeral:(*v1.EphemeralVolumeSource)(nil)}}, v1.Volume{Name:"xtables-lock", VolumeSource:v1.VolumeSource{HostPath:(*v1.HostPathVolumeSource)(0x400179c2c0), EmptyDir:(*v1.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*v1.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*v1.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*v1.GitRepoVolumeSource)(nil), Secret:(*v1.SecretVolumeSource)(nil), NFS:(*v1.NFSVolumeSource)(nil), ISCSI:(*v1.ISCSIVolumeSource)(nil), Glusterfs:(*v1.Glust
erfsVolumeSource)(nil), PersistentVolumeClaim:(*v1.PersistentVolumeClaimVolumeSource)(nil), RBD:(*v1.RBDVolumeSource)(nil), FlexVolume:(*v1.FlexVolumeSource)(nil), Cinder:(*v1.CinderVolumeSource)(nil), CephFS:(*v1.CephFSVolumeSource)(nil), Flocker:(*v1.FlockerVolumeSource)(nil), DownwardAPI:(*v1.DownwardAPIVolumeSource)(nil), FC:(*v1.FCVolumeSource)(nil), AzureFile:(*v1.AzureFileVolumeSource)(nil), ConfigMap:(*v1.ConfigMapVolumeSource)(nil), VsphereVolume:(*v1.VsphereVirtualDiskVolumeSource)(nil), Quobyte:(*v1.QuobyteVolumeSource)(nil), AzureDisk:(*v1.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*v1.PhotonPersistentDiskVolumeSource)(nil), Projected:(*v1.ProjectedVolumeSource)(nil), PortworxVolume:(*v1.PortworxVolumeSource)(nil), ScaleIO:(*v1.ScaleIOVolumeSource)(nil), StorageOS:(*v1.StorageOSVolumeSource)(nil), CSI:(*v1.CSIVolumeSource)(nil), Ephemeral:(*v1.EphemeralVolumeSource)(nil)}}, v1.Volume{Name:"lib-modules", VolumeSource:v1.VolumeSource{HostPath:(*v1.HostPathVolumeSource)(0x400179c2e0), EmptyDi
r:(*v1.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*v1.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*v1.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*v1.GitRepoVolumeSource)(nil), Secret:(*v1.SecretVolumeSource)(nil), NFS:(*v1.NFSVolumeSource)(nil), ISCSI:(*v1.ISCSIVolumeSource)(nil), Glusterfs:(*v1.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*v1.PersistentVolumeClaimVolumeSource)(nil), RBD:(*v1.RBDVolumeSource)(nil), FlexVolume:(*v1.FlexVolumeSource)(nil), Cinder:(*v1.CinderVolumeSource)(nil), CephFS:(*v1.CephFSVolumeSource)(nil), Flocker:(*v1.FlockerVolumeSource)(nil), DownwardAPI:(*v1.DownwardAPIVolumeSource)(nil), FC:(*v1.FCVolumeSource)(nil), AzureFile:(*v1.AzureFileVolumeSource)(nil), ConfigMap:(*v1.ConfigMapVolumeSource)(nil), VsphereVolume:(*v1.VsphereVirtualDiskVolumeSource)(nil), Quobyte:(*v1.QuobyteVolumeSource)(nil), AzureDisk:(*v1.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*v1.PhotonPersistentDiskVolumeSource)(nil), Projected:(*v1.ProjectedVolumeSource)(nil),
PortworxVolume:(*v1.PortworxVolumeSource)(nil), ScaleIO:(*v1.ScaleIOVolumeSource)(nil), StorageOS:(*v1.StorageOSVolumeSource)(nil), CSI:(*v1.CSIVolumeSource)(nil), Ephemeral:(*v1.EphemeralVolumeSource)(nil)}}}, InitContainers:[]v1.Container(nil), Containers:[]v1.Container{v1.Container{Name:"kindnet-cni", Image:"docker.io/kindest/kindnetd:v20240513-cd2ac642", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar{v1.EnvVar{Name:"HOST_IP", Value:"", ValueFrom:(*v1.EnvVarSource)(0x400179c300)}, v1.EnvVar{Name:"POD_IP", Value:"", ValueFrom:(*v1.EnvVarSource)(0x400179c340)}, v1.EnvVar{Name:"POD_SUBNET", Value:"10.244.0.0/16", ValueFrom:(*v1.EnvVarSource)(nil)}}, Resources:v1.ResourceRequirements{Limits:v1.ResourceList{"cpu":resource.Quantity{i:resource.int64Amount{value:100, scale:-3}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"100m", Format:"DecimalSI"}, "memory":resource.Quantity{i:resource.int64Amount{value:52428800, scale:
0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"50Mi", Format:"BinarySI"}}, Requests:v1.ResourceList{"cpu":resource.Quantity{i:resource.int64Amount{value:100, scale:-3}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"100m", Format:"DecimalSI"}, "memory":resource.Quantity{i:resource.int64Amount{value:52428800, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"50Mi", Format:"BinarySI"}}}, VolumeMounts:[]v1.VolumeMount{v1.VolumeMount{Name:"cni-cfg", ReadOnly:false, MountPath:"/etc/cni/net.d", SubPath:"", MountPropagation:(*v1.MountPropagationMode)(nil), SubPathExpr:""}, v1.VolumeMount{Name:"xtables-lock", ReadOnly:false, MountPath:"/run/xtables.lock", SubPath:"", MountPropagation:(*v1.MountPropagationMode)(nil), SubPathExpr:""}, v1.VolumeMount{Name:"lib-modules", ReadOnly:true, MountPath:"/lib/modules", SubPath:"", MountPropagation:(*v1.MountPropagationMode)(nil), SubPathExpr:""}}, VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), StartupProbe:
(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(0x400154eb40), Stdin:false, StdinOnce:false, TTY:false}}, EphemeralContainers:[]v1.EphemeralContainer(nil), RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0x4001c02048), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"kindnet", DeprecatedServiceAccount:"kindnet", AutomountServiceAccountToken:(*bool)(nil), NodeName:"", HostNetwork:true, HostPID:false, HostIPC:false, ShareProcessNamespace:(*bool)(nil), SecurityContext:(*v1.PodSecurityContext)(0x40007141c0), ImagePullSecrets:[]v1.LocalObjectReference(nil), Hostname:"", Subdomain:"", Affinity:(*v1.Affinity)(nil), SchedulerName:"default-scheduler", Tolerations:[]v1.Toleration{v1.Toleration{Key:"", Operator:"Exists", Value:"", Effect:"NoSchedule", TolerationSeconds:(*int64)(nil)}},
HostAliases:[]v1.HostAlias(nil), PriorityClassName:"", Priority:(*int32)(nil), DNSConfig:(*v1.PodDNSConfig)(nil), ReadinessGates:[]v1.PodReadinessGate(nil), RuntimeClassName:(*string)(nil), EnableServiceLinks:(*bool)(nil), PreemptionPolicy:(*v1.PreemptionPolicy)(nil), Overhead:v1.ResourceList(nil), TopologySpreadConstraints:[]v1.TopologySpreadConstraint(nil), SetHostnameAsFQDN:(*bool)(nil)}}, UpdateStrategy:v1.DaemonSetUpdateStrategy{Type:"RollingUpdate", RollingUpdate:(*v1.RollingUpdateDaemonSet)(0x400162e308)}, MinReadySeconds:0, RevisionHistoryLimit:(*int32)(0x4001c02090)}, Status:v1.DaemonSetStatus{CurrentNumberScheduled:0, NumberMisscheduled:0, DesiredNumberScheduled:0, NumberReady:0, ObservedGeneration:0, UpdatedNumberScheduled:0, NumberAvailable:0, NumberUnavailable:0, CollisionCount:(*int32)(nil), Conditions:[]v1.DaemonSetCondition(nil)}}: Operation cannot be fulfilled on daemonsets.apps "kindnet": the object has been modified; please apply your changes to the latest version and try again
	I0704 02:35:03.775447       1 shared_informer.go:240] Waiting for caches to sync for garbage collector
	I0704 02:35:03.827196       1 shared_informer.go:247] Caches are synced for garbage collector 
	I0704 02:35:03.827219       1 garbagecollector.go:151] Garbage collector: all resource monitors have synced. Proceeding to collect garbage
	I0704 02:35:03.875698       1 shared_informer.go:247] Caches are synced for garbage collector 
	I0704 02:35:05.042240       1 event.go:291] "Event occurred" object="kube-system/coredns" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set coredns-74ff55c5b to 1"
	I0704 02:35:05.064591       1 event.go:291] "Event occurred" object="kube-system/coredns-74ff55c5b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: coredns-74ff55c5b-mzcfn"
	I0704 02:35:08.209960       1 node_lifecycle_controller.go:1222] Controller detected that some Nodes are Ready. Exiting master disruption mode.
	I0704 02:37:05.197358       1 event.go:291] "Event occurred" object="kube-system/metrics-server" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set metrics-server-9975d5f86 to 1"
	E0704 02:37:05.329606       1 clusterroleaggregation_controller.go:181] view failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "view": the object has been modified; please apply your changes to the latest version and try again
	E0704 02:37:05.336105       1 clusterroleaggregation_controller.go:181] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
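The three `Operation cannot be fulfilled ... the object has been modified` errors above (on the `kindnet` daemonset and the `view`/`edit` clusterroles) are ordinary optimistic-concurrency conflicts: an update was submitted with a stale resourceVersion, and the controller retries on its own. For reference, a minimal sketch of the usual retry pattern with client-go's `retry.RetryOnConflict` helper, assuming a clientset is available; the daemonset name mirrors the log, and the annotation change is purely hypothetical:

	package example

	import (
		"context"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/util/retry"
	)

	func touchKindnet(cs kubernetes.Interface) error {
		return retry.RetryOnConflict(retry.DefaultRetry, func() error {
			// Re-read on every attempt so the update carries the current
			// resourceVersion instead of a stale one.
			ds, err := cs.AppsV1().DaemonSets("kube-system").Get(
				context.TODO(), "kindnet", metav1.GetOptions{})
			if err != nil {
				return err
			}
			if ds.Annotations == nil {
				ds.Annotations = map[string]string{}
			}
			ds.Annotations["example/touched"] = "true" // hypothetical change
			_, err = cs.AppsV1().DaemonSets("kube-system").Update(
				context.TODO(), ds, metav1.UpdateOptions{})
			return err
		})
	}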
	
	
	==> kube-proxy [c87928a87c1d3d7d27dafbbb731a758e72e428732de7dc39c2c952505e65234f] <==
	I0704 02:37:44.206673       1 node.go:172] Successfully retrieved node IP: 192.168.76.2
	I0704 02:37:44.206888       1 server_others.go:142] kube-proxy node IP is an IPv4 address (192.168.76.2), assume IPv4 operation
	W0704 02:37:44.225637       1 server_others.go:578] Unknown proxy mode "", assuming iptables proxy
	I0704 02:37:44.225855       1 server_others.go:185] Using iptables Proxier.
	I0704 02:37:44.226294       1 server.go:650] Version: v1.20.0
	I0704 02:37:44.227103       1 config.go:224] Starting endpoint slice config controller
	I0704 02:37:44.228646       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	I0704 02:37:44.227600       1 config.go:315] Starting service config controller
	I0704 02:37:44.228991       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0704 02:37:44.328952       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	I0704 02:37:44.329212       1 shared_informer.go:247] Caches are synced for service config 
	
	
	==> kube-proxy [e36274ec135b818053c9b1ddec36771b772a75851df732aa1a4fa768cc912ac8] <==
	I0704 02:35:04.579396       1 node.go:172] Successfully retrieved node IP: 192.168.76.2
	I0704 02:35:04.579546       1 server_others.go:142] kube-proxy node IP is an IPv4 address (192.168.76.2), assume IPv4 operation
	W0704 02:35:04.610429       1 server_others.go:578] Unknown proxy mode "", assuming iptables proxy
	I0704 02:35:04.615742       1 server_others.go:185] Using iptables Proxier.
	I0704 02:35:04.616003       1 server.go:650] Version: v1.20.0
	I0704 02:35:04.616521       1 config.go:315] Starting service config controller
	I0704 02:35:04.616529       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0704 02:35:04.620525       1 config.go:224] Starting endpoint slice config controller
	I0704 02:35:04.620537       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	I0704 02:35:04.721047       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	I0704 02:35:04.721115       1 shared_informer.go:247] Caches are synced for service config 
	
	
	==> kube-scheduler [50f1aed2fd69db9f1469ae8c509e044f5883a8af17b914e3ccea66e5159e538c] <==
	I0704 02:37:37.428321       1 serving.go:331] Generated self-signed cert in-memory
	W0704 02:37:41.656130       1 requestheader_controller.go:193] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0704 02:37:41.656162       1 authentication.go:332] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0704 02:37:41.656170       1 authentication.go:333] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0704 02:37:41.656176       1 authentication.go:334] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0704 02:37:41.907433       1 secure_serving.go:197] Serving securely on 127.0.0.1:10259
	I0704 02:37:41.908395       1 configmap_cafile_content.go:202] Starting client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0704 02:37:41.908429       1 shared_informer.go:240] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0704 02:37:41.908448       1 tlsconfig.go:240] Starting DynamicServingCertificateController
	I0704 02:37:42.015086       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
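The startup warnings above show the scheduler failing to read the `extension-apiserver-authentication` configmap before RBAC had caught up; the log itself names the `kubectl create rolebinding` fix. As a hedged sketch only, that one-liner corresponds roughly to the following client-go call (the binding name and service-account subject are illustrative, not taken from this cluster):

	package example

	import (
		"context"

		rbacv1 "k8s.io/api/rbac/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	func grantAuthReader(cs kubernetes.Interface, ns, sa string) error {
		rb := &rbacv1.RoleBinding{
			ObjectMeta: metav1.ObjectMeta{Namespace: "kube-system", Name: "auth-reader-binding"},
			RoleRef: rbacv1.RoleRef{
				APIGroup: "rbac.authorization.k8s.io",
				Kind:     "Role",
				// The reader role the warning points at.
				Name: "extension-apiserver-authentication-reader",
			},
			Subjects: []rbacv1.Subject{{
				Kind: "ServiceAccount", Namespace: ns, Name: sa,
			}},
		}
		_, err := cs.RbacV1().RoleBindings("kube-system").Create(
			context.TODO(), rb, metav1.CreateOptions{})
		return err
	}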
	
	
	==> kube-scheduler [c77b76d8ee8e90997b74a44444dda1ceb1606c1bcac6e2e897f099aefbae9ca5] <==
	E0704 02:34:43.556695       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0704 02:34:43.562464       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0704 02:34:43.562774       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0704 02:34:43.604391       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0704 02:34:43.604714       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0704 02:34:43.604907       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0704 02:34:43.605188       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0704 02:34:43.605488       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0704 02:34:43.605571       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0704 02:34:43.605626       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0704 02:34:43.605685       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.PodDisruptionBudget: failed to list *v1beta1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0704 02:34:43.605746       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0704 02:34:44.454813       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0704 02:34:44.657397       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0704 02:34:44.657838       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0704 02:34:44.741343       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0704 02:34:44.793333       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0704 02:34:44.871250       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0704 02:34:44.924331       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0704 02:34:44.924420       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0704 02:34:45.010877       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0704 02:34:45.011949       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0704 02:34:45.039254       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0704 02:34:45.148535       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.PodDisruptionBudget: failed to list *v1beta1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	I0704 02:34:47.919778       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
	
	
	==> kubelet <==
	Jul 04 02:41:57 old-k8s-version-610521 kubelet[660]: E0704 02:41:57.522153     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: I0704 02:42:09.521343     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.521551     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:42:09 old-k8s-version-610521 kubelet[660]: E0704 02:42:09.522234     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: I0704 02:42:22.520832     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521701     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:42:22 old-k8s-version-610521 kubelet[660]: E0704 02:42:22.521892     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:42:33 old-k8s-version-610521 kubelet[660]: E0704 02:42:33.521599     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: I0704 02:42:36.521147     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:42:36 old-k8s-version-610521 kubelet[660]: E0704 02:42:36.521477     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: I0704 02:42:48.520812     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521448     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:42:48 old-k8s-version-610521 kubelet[660]: E0704 02:42:48.521725     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:43:00 old-k8s-version-610521 kubelet[660]: E0704 02:43:00.521165     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: I0704 02:43:01.524745     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:43:01 old-k8s-version-610521 kubelet[660]: E0704 02:43:01.526471     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:43:14 old-k8s-version-610521 kubelet[660]: E0704 02:43:14.521315     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ImagePullBackOff: "Back-off pulling image \"fake.domain/registry.k8s.io/echoserver:1.4\""
	Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: I0704 02:43:16.520580     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:43:16 old-k8s-version-610521 kubelet[660]: E0704 02:43:16.521001     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
	Jul 04 02:43:26 old-k8s-version-610521 kubelet[660]: E0704 02:43:26.528855     660 remote_image.go:113] PullImage "fake.domain/registry.k8s.io/echoserver:1.4" from image service failed: rpc error: code = Unknown desc = failed to pull and unpack image "fake.domain/registry.k8s.io/echoserver:1.4": failed to resolve reference "fake.domain/registry.k8s.io/echoserver:1.4": failed to do request: Head "https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host
	Jul 04 02:43:26 old-k8s-version-610521 kubelet[660]: E0704 02:43:26.528909     660 kuberuntime_image.go:51] Pull image "fake.domain/registry.k8s.io/echoserver:1.4" failed: rpc error: code = Unknown desc = failed to pull and unpack image "fake.domain/registry.k8s.io/echoserver:1.4": failed to resolve reference "fake.domain/registry.k8s.io/echoserver:1.4": failed to do request: Head "https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host
	Jul 04 02:43:26 old-k8s-version-610521 kubelet[660]: E0704 02:43:26.529042     660 kuberuntime_manager.go:829] container &Container{Name:metrics-server,Image:fake.domain/registry.k8s.io/echoserver:1.4,Command:[],Args:[--cert-dir=/tmp --secure-port=4443 --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname --kubelet-use-node-status-port --metric-resolution=60s --kubelet-insecure-tls],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:4443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{100 -3} {<nil>} 100m DecimalSI},memory: {{209715200 0} {<nil>}  BinarySI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:tmp-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:metrics-server-token-b2rgx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{Handler:Handler{Exec
:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{1 0 https},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,},ReadinessProbe:&Probe{Handler:Handler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{1 0 https},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,} start failed in pod metrics-server-9975d5f86-hvdg7_kube-system(18763bc
b-0bff-4320-8ee8-5880c0b1f3e9): ErrImagePull: rpc error: code = Unknown desc = failed to pull and unpack image "fake.domain/registry.k8s.io/echoserver:1.4": failed to resolve reference "fake.domain/registry.k8s.io/echoserver:1.4": failed to do request: Head "https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host
	Jul 04 02:43:26 old-k8s-version-610521 kubelet[660]: E0704 02:43:26.529076     660 pod_workers.go:191] Error syncing pod 18763bcb-0bff-4320-8ee8-5880c0b1f3e9 ("metrics-server-9975d5f86-hvdg7_kube-system(18763bcb-0bff-4320-8ee8-5880c0b1f3e9)"), skipping: failed to "StartContainer" for "metrics-server" with ErrImagePull: "rpc error: code = Unknown desc = failed to pull and unpack image \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to resolve reference \"fake.domain/registry.k8s.io/echoserver:1.4\": failed to do request: Head \"https://fake.domain/v2/registry.k8s.io/echoserver/manifests/1.4\": dial tcp: lookup fake.domain on 192.168.76.1:53: no such host"
	Jul 04 02:43:27 old-k8s-version-610521 kubelet[660]: I0704 02:43:27.522683     660 scope.go:95] [topologymanager] RemoveContainer - Container ID: 0dce99ee6be65fe7ea4530a95d0f1f7baf7bf003e473a5c867e2b2d54a4808a1
	Jul 04 02:43:27 old-k8s-version-610521 kubelet[660]: E0704 02:43:27.524690     660 pod_workers.go:191] Error syncing pod 796b9c8d-ee21-4752-a2e2-2f6744ad1b9e ("dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"), skipping: failed to "StartContainer" for "dashboard-metrics-scraper" with CrashLoopBackOff: "back-off 2m40s restarting failed container=dashboard-metrics-scraper pod=dashboard-metrics-scraper-8d5bb5db8-rxdl6_kubernetes-dashboard(796b9c8d-ee21-4752-a2e2-2f6744ad1b9e)"
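The kubelet loop above alternates between `ErrImagePull` and `ImagePullBackOff` because `fake.domain` never resolves, which appears to be deliberate in this test's metrics-server fixture. When triaging such loops programmatically, the same waiting reason is available on the pod status; a small sketch with client-go (the function name and arguments are illustrative):

	package example

	import (
		"context"
		"fmt"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	func waitingReasons(cs kubernetes.Interface, ns, pod string) ([]string, error) {
		p, err := cs.CoreV1().Pods(ns).Get(context.TODO(), pod, metav1.GetOptions{})
		if err != nil {
			return nil, err
		}
		var out []string
		for _, st := range p.Status.ContainerStatuses {
			if w := st.State.Waiting; w != nil {
				// e.g. "metrics-server: ImagePullBackOff (Back-off pulling image ...)"
				out = append(out, fmt.Sprintf("%s: %s (%s)", st.Name, w.Reason, w.Message))
			}
		}
		return out, nil
	}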
	
	
	==> kubernetes-dashboard [11d610a49bcd9d793ec0a3506ed0f5ea2c0c963cab7b6867ac5b304ba0ad0e3c] <==
	2024/07/04 02:38:08 Using namespace: kubernetes-dashboard
	2024/07/04 02:38:08 Using in-cluster config to connect to apiserver
	2024/07/04 02:38:08 Using secret token for csrf signing
	2024/07/04 02:38:08 Initializing csrf token from kubernetes-dashboard-csrf secret
	2024/07/04 02:38:08 Empty token. Generating and storing in a secret kubernetes-dashboard-csrf
	2024/07/04 02:38:08 Successful initial request to the apiserver, version: v1.20.0
	2024/07/04 02:38:08 Generating JWE encryption key
	2024/07/04 02:38:08 New synchronizer has been registered: kubernetes-dashboard-key-holder-kubernetes-dashboard. Starting
	2024/07/04 02:38:08 Starting secret synchronizer for kubernetes-dashboard-key-holder in namespace kubernetes-dashboard
	2024/07/04 02:38:08 Initializing JWE encryption key from synchronized object
	2024/07/04 02:38:08 Creating in-cluster Sidecar client
	2024/07/04 02:38:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:38:08 Serving insecurely on HTTP port: 9090
	2024/07/04 02:38:38 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:39:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:39:38 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:40:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:40:38 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:41:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:41:38 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:42:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:42:38 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:43:08 Metric client health check failed: the server is currently unable to handle the request (get services dashboard-metrics-scraper). Retrying in 30 seconds.
	2024/07/04 02:38:08 Starting overwatch
	
	
	==> storage-provisioner [946bdba0449bb980124c9355f39e4f28729385ded40518e3f52317b9f372be0a] <==
	I0704 02:37:44.154178       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0704 02:38:14.158080       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> storage-provisioner [a4760ff354cafc5cfc98e0cf499f32eac6195eb5b19a09c3b9252ff1ed6937a2] <==
	I0704 02:38:26.629810       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0704 02:38:26.650002       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0704 02:38:26.650063       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0704 02:38:44.122913       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"af470d16-263b-4d68-bd64-0ae7ccdafa3b", APIVersion:"v1", ResourceVersion:"856", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' old-k8s-version-610521_bb42e3f5-45b3-46c9-8c44-79c08382dd23 became leader
	I0704 02:38:44.123014       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0704 02:38:44.124909       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_old-k8s-version-610521_bb42e3f5-45b3-46c9-8c44-79c08382dd23!
	I0704 02:38:44.226112       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_old-k8s-version-610521_bb42e3f5-45b3-46c9-8c44-79c08382dd23!
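Before starting its controller, the provisioner above first acquires the `kube-system/k8s.io-minikube-hostpath` lock (an Endpoints-based lock in this old release, per the event). A minimal sketch of the same pattern using client-go's current Lease-based lock; the lock name and namespace mirror the log, and everything else is illustrative rather than the provisioner's actual code:

	package example

	import (
		"context"
		"time"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/leaderelection"
		"k8s.io/client-go/tools/leaderelection/resourcelock"
	)

	func runWhenLeader(ctx context.Context, cs kubernetes.Interface, id string, start func(context.Context)) {
		lock := &resourcelock.LeaseLock{
			LeaseMeta: metav1.ObjectMeta{
				Namespace: "kube-system",
				Name:      "k8s.io-minikube-hostpath",
			},
			Client:     cs.CoordinationV1(),
			LockConfig: resourcelock.ResourceLockConfig{Identity: id},
		}
		leaderelection.RunOrDie(ctx, leaderelection.LeaderElectionConfig{
			Lock:            lock,
			ReleaseOnCancel: true,
			LeaseDuration:   15 * time.Second,
			RenewDeadline:   10 * time.Second,
			RetryPeriod:     2 * time.Second,
			Callbacks: leaderelection.LeaderCallbacks{
				OnStartedLeading: start, // begin provisioning only once elected
				OnStoppedLeading: func() {},
			},
		})
	}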
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p old-k8s-version-610521 -n old-k8s-version-610521
helpers_test.go:261: (dbg) Run:  kubectl --context old-k8s-version-610521 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: metrics-server-9975d5f86-hvdg7
helpers_test.go:274: ======> post-mortem[TestStartStop/group/old-k8s-version/serial/SecondStart]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context old-k8s-version-610521 describe pod metrics-server-9975d5f86-hvdg7
helpers_test.go:277: (dbg) Non-zero exit: kubectl --context old-k8s-version-610521 describe pod metrics-server-9975d5f86-hvdg7: exit status 1 (90.129809ms)

** stderr ** 
	Error from server (NotFound): pods "metrics-server-9975d5f86-hvdg7" not found

** /stderr **
helpers_test.go:279: kubectl --context old-k8s-version-610521 describe pod metrics-server-9975d5f86-hvdg7: exit status 1
--- FAIL: TestStartStop/group/old-k8s-version/serial/SecondStart (373.14s)


Test pass (289/328)

Order  Passed test  Duration (s)
3 TestDownloadOnly/v1.20.0/json-events 8.52
4 TestDownloadOnly/v1.20.0/preload-exists 0
8 TestDownloadOnly/v1.20.0/LogsDuration 0.08
9 TestDownloadOnly/v1.20.0/DeleteAll 0.21
10 TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds 0.13
12 TestDownloadOnly/v1.30.2/json-events 7.09
13 TestDownloadOnly/v1.30.2/preload-exists 0
17 TestDownloadOnly/v1.30.2/LogsDuration 0.08
18 TestDownloadOnly/v1.30.2/DeleteAll 0.2
19 TestDownloadOnly/v1.30.2/DeleteAlwaysSucceeds 0.13
21 TestBinaryMirror 0.55
25 TestAddons/PreSetup/EnablingAddonOnNonExistingCluster 0.06
26 TestAddons/PreSetup/DisablingAddonOnNonExistingCluster 0.07
27 TestAddons/Setup 160.98
29 TestAddons/parallel/Registry 15.66
32 TestAddons/parallel/MetricsServer 6.91
35 TestAddons/parallel/CSI 34.69
36 TestAddons/parallel/Headlamp 11.07
37 TestAddons/parallel/CloudSpanner 5.6
38 TestAddons/parallel/LocalPath 52.75
39 TestAddons/parallel/NvidiaDevicePlugin 5.56
40 TestAddons/parallel/Yakd 5.01
44 TestCertOptions 35.28
45 TestCertExpiration 229.58
47 TestForceSystemdFlag 40.96
48 TestForceSystemdEnv 38.19
49 TestDockerEnvContainerd 46.89
54 TestErrorSpam/setup 33.7
55 TestErrorSpam/start 0.7
56 TestErrorSpam/status 1.01
57 TestErrorSpam/pause 1.76
58 TestErrorSpam/unpause 1.74
59 TestErrorSpam/stop 1.41
62 TestFunctional/serial/CopySyncFile 0
63 TestFunctional/serial/StartWithProxy 58.07
64 TestFunctional/serial/AuditLog 0
65 TestFunctional/serial/SoftStart 5.82
66 TestFunctional/serial/KubeContext 0.06
67 TestFunctional/serial/KubectlGetPods 0.11
70 TestFunctional/serial/CacheCmd/cache/add_remote 4.51
71 TestFunctional/serial/CacheCmd/cache/add_local 1.31
72 TestFunctional/serial/CacheCmd/cache/CacheDelete 0.05
73 TestFunctional/serial/CacheCmd/cache/list 0.06
74 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 0.3
75 TestFunctional/serial/CacheCmd/cache/cache_reload 2.26
76 TestFunctional/serial/CacheCmd/cache/delete 0.11
77 TestFunctional/serial/MinikubeKubectlCmd 0.14
78 TestFunctional/serial/MinikubeKubectlCmdDirectly 0.13
79 TestFunctional/serial/ExtraConfig 39.48
80 TestFunctional/serial/ComponentHealth 0.13
81 TestFunctional/serial/LogsCmd 1.84
82 TestFunctional/serial/LogsFileCmd 1.98
83 TestFunctional/serial/InvalidService 3.78
85 TestFunctional/parallel/ConfigCmd 0.47
86 TestFunctional/parallel/DashboardCmd 8.82
87 TestFunctional/parallel/DryRun 0.9
88 TestFunctional/parallel/InternationalLanguage 0.37
89 TestFunctional/parallel/StatusCmd 1.39
93 TestFunctional/parallel/ServiceCmdConnect 9.59
94 TestFunctional/parallel/AddonsCmd 0.14
95 TestFunctional/parallel/PersistentVolumeClaim 25.6
97 TestFunctional/parallel/SSHCmd 0.73
98 TestFunctional/parallel/CpCmd 2.35
100 TestFunctional/parallel/FileSync 0.43
101 TestFunctional/parallel/CertSync 2.17
105 TestFunctional/parallel/NodeLabels 0.11
107 TestFunctional/parallel/NonActiveRuntimeDisabled 0.71
109 TestFunctional/parallel/License 0.24
111 TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel 0.64
112 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0
114 TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup 12.55
115 TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP 0.09
116 TestFunctional/parallel/TunnelCmd/serial/AccessDirect 0
120 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
121 TestFunctional/parallel/ServiceCmd/DeployApp 8.24
122 TestFunctional/parallel/ProfileCmd/profile_not_create 0.41
123 TestFunctional/parallel/ProfileCmd/profile_list 0.38
124 TestFunctional/parallel/ProfileCmd/profile_json_output 0.4
125 TestFunctional/parallel/MountCmd/any-port 6.15
126 TestFunctional/parallel/ServiceCmd/List 0.51
127 TestFunctional/parallel/ServiceCmd/JSONOutput 0.52
128 TestFunctional/parallel/ServiceCmd/HTTPS 0.48
129 TestFunctional/parallel/ServiceCmd/Format 0.53
130 TestFunctional/parallel/ServiceCmd/URL 0.54
131 TestFunctional/parallel/MountCmd/specific-port 1.38
132 TestFunctional/parallel/MountCmd/VerifyCleanup 1.71
133 TestFunctional/parallel/Version/short 0.09
134 TestFunctional/parallel/Version/components 1.21
135 TestFunctional/parallel/ImageCommands/ImageListShort 0.27
136 TestFunctional/parallel/ImageCommands/ImageListTable 0.33
137 TestFunctional/parallel/ImageCommands/ImageListJson 0.3
138 TestFunctional/parallel/ImageCommands/ImageListYaml 0.3
139 TestFunctional/parallel/ImageCommands/ImageBuild 2.59
140 TestFunctional/parallel/ImageCommands/Setup 1.87
143 TestFunctional/parallel/UpdateContextCmd/no_changes 0.15
144 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 0.24
145 TestFunctional/parallel/UpdateContextCmd/no_clusters 0.16
148 TestFunctional/parallel/ImageCommands/ImageRemove 0.45
150 TestFunctional/parallel/ImageCommands/ImageSaveDaemon 0.62
151 TestFunctional/delete_addon-resizer_images 0.08
152 TestFunctional/delete_my-image_image 0.02
153 TestFunctional/delete_minikube_cached_images 0.02
157 TestMultiControlPlane/serial/StartCluster 128.15
158 TestMultiControlPlane/serial/DeployApp 17.28
159 TestMultiControlPlane/serial/PingHostFromPods 1.58
160 TestMultiControlPlane/serial/AddWorkerNode 23.67
161 TestMultiControlPlane/serial/NodeLabels 0.13
162 TestMultiControlPlane/serial/HAppyAfterClusterStart 0.76
163 TestMultiControlPlane/serial/CopyFile 19.8
164 TestMultiControlPlane/serial/StopSecondaryNode 12.91
165 TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop 0.58
166 TestMultiControlPlane/serial/RestartSecondaryNode 19.5
167 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart 0.78
168 TestMultiControlPlane/serial/RestartClusterKeepsNodes 143.52
169 TestMultiControlPlane/serial/DeleteSecondaryNode 11.66
170 TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete 0.55
171 TestMultiControlPlane/serial/StopCluster 36.05
172 TestMultiControlPlane/serial/RestartCluster 79.85
173 TestMultiControlPlane/serial/DegradedAfterClusterRestart 0.6
174 TestMultiControlPlane/serial/AddSecondaryNode 42.78
175 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd 0.8
179 TestJSONOutput/start/Command 81.82
180 TestJSONOutput/start/Audit 0
182 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
183 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
185 TestJSONOutput/pause/Command 0.74
186 TestJSONOutput/pause/Audit 0
188 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
189 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
191 TestJSONOutput/unpause/Command 0.68
192 TestJSONOutput/unpause/Audit 0
194 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
195 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
197 TestJSONOutput/stop/Command 5.79
198 TestJSONOutput/stop/Audit 0
200 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
201 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
202 TestErrorJSONOutput 0.22
204 TestKicCustomNetwork/create_custom_network 39.18
205 TestKicCustomNetwork/use_default_bridge_network 36.45
206 TestKicExistingNetwork 36.23
207 TestKicCustomSubnet 33.66
208 TestKicStaticIP 34.45
209 TestMainNoArgs 0.06
210 TestMinikubeProfile 63.83
213 TestMountStart/serial/StartWithMountFirst 6.26
214 TestMountStart/serial/VerifyMountFirst 0.28
215 TestMountStart/serial/StartWithMountSecond 6.66
216 TestMountStart/serial/VerifyMountSecond 0.26
217 TestMountStart/serial/DeleteFirst 1.64
218 TestMountStart/serial/VerifyMountPostDelete 0.25
219 TestMountStart/serial/Stop 1.21
220 TestMountStart/serial/RestartStopped 7.78
221 TestMountStart/serial/VerifyMountPostStop 0.28
224 TestMultiNode/serial/FreshStart2Nodes 73.49
225 TestMultiNode/serial/DeployApp2Nodes 4.3
226 TestMultiNode/serial/PingHostFrom2Pods 0.94
227 TestMultiNode/serial/AddNode 16.26
228 TestMultiNode/serial/MultiNodeLabels 0.1
229 TestMultiNode/serial/ProfileList 0.36
230 TestMultiNode/serial/CopyFile 10.43
231 TestMultiNode/serial/StopNode 2.23
232 TestMultiNode/serial/StartAfterStop 10.73
233 TestMultiNode/serial/RestartKeepsNodes 81.08
234 TestMultiNode/serial/DeleteNode 5.49
235 TestMultiNode/serial/StopMultiNode 24
236 TestMultiNode/serial/RestartMultiNode 49.59
237 TestMultiNode/serial/ValidateNameConflict 31.8
242 TestPreload 109.39
244 TestScheduledStopUnix 108.03
247 TestInsufficientStorage 11.32
248 TestRunningBinaryUpgrade 93.29
250 TestKubernetesUpgrade 341.81
251 TestMissingContainerUpgrade 156.88
253 TestPause/serial/Start 99.16
255 TestNoKubernetes/serial/StartNoK8sWithVersion 0.09
256 TestNoKubernetes/serial/StartWithK8s 41.73
257 TestNoKubernetes/serial/StartWithStopK8s 16.29
258 TestNoKubernetes/serial/Start 7.77
259 TestNoKubernetes/serial/VerifyK8sNotRunning 0.3
260 TestNoKubernetes/serial/ProfileList 1.09
261 TestNoKubernetes/serial/Stop 1.2
262 TestNoKubernetes/serial/StartNoArgs 6.95
263 TestNoKubernetes/serial/VerifyK8sNotRunningSecond 0.26
271 TestNetworkPlugins/group/false 3.62
275 TestPause/serial/SecondStartNoReconfiguration 7.53
276 TestPause/serial/Pause 0.88
277 TestPause/serial/VerifyStatus 0.38
278 TestPause/serial/Unpause 0.93
279 TestPause/serial/PauseAgain 1.08
280 TestPause/serial/DeletePaused 3.05
281 TestPause/serial/VerifyDeletedResources 6.18
282 TestStoppedBinaryUpgrade/Setup 0.7
283 TestStoppedBinaryUpgrade/Upgrade 121.83
291 TestNetworkPlugins/group/auto/Start 92.4
292 TestStoppedBinaryUpgrade/MinikubeLogs 1.6
293 TestNetworkPlugins/group/kindnet/Start 88.93
294 TestNetworkPlugins/group/auto/KubeletFlags 0.29
295 TestNetworkPlugins/group/auto/NetCatPod 8.35
296 TestNetworkPlugins/group/auto/DNS 0.23
297 TestNetworkPlugins/group/auto/Localhost 0.16
298 TestNetworkPlugins/group/auto/HairPin 0.16
299 TestNetworkPlugins/group/calico/Start 78.93
300 TestNetworkPlugins/group/kindnet/ControllerPod 6.01
301 TestNetworkPlugins/group/kindnet/KubeletFlags 0.44
302 TestNetworkPlugins/group/kindnet/NetCatPod 10.42
303 TestNetworkPlugins/group/kindnet/DNS 0.19
304 TestNetworkPlugins/group/kindnet/Localhost 0.21
305 TestNetworkPlugins/group/kindnet/HairPin 0.21
306 TestNetworkPlugins/group/custom-flannel/Start 63.97
307 TestNetworkPlugins/group/calico/ControllerPod 6.01
308 TestNetworkPlugins/group/calico/KubeletFlags 0.44
309 TestNetworkPlugins/group/calico/NetCatPod 11.34
310 TestNetworkPlugins/group/calico/DNS 0.32
311 TestNetworkPlugins/group/calico/Localhost 0.2
312 TestNetworkPlugins/group/calico/HairPin 0.21
313 TestNetworkPlugins/group/enable-default-cni/Start 56.32
314 TestNetworkPlugins/group/custom-flannel/KubeletFlags 0.29
315 TestNetworkPlugins/group/custom-flannel/NetCatPod 10.27
316 TestNetworkPlugins/group/custom-flannel/DNS 0.22
317 TestNetworkPlugins/group/custom-flannel/Localhost 0.2
318 TestNetworkPlugins/group/custom-flannel/HairPin 0.22
319 TestNetworkPlugins/group/flannel/Start 68.37
320 TestNetworkPlugins/group/enable-default-cni/KubeletFlags 0.37
321 TestNetworkPlugins/group/enable-default-cni/NetCatPod 11.35
322 TestNetworkPlugins/group/enable-default-cni/DNS 0.22
323 TestNetworkPlugins/group/enable-default-cni/Localhost 0.28
324 TestNetworkPlugins/group/enable-default-cni/HairPin 0.19
325 TestNetworkPlugins/group/bridge/Start 49.82
326 TestNetworkPlugins/group/flannel/ControllerPod 6.01
327 TestNetworkPlugins/group/flannel/KubeletFlags 0.32
328 TestNetworkPlugins/group/flannel/NetCatPod 11.4
329 TestNetworkPlugins/group/flannel/DNS 0.21
330 TestNetworkPlugins/group/flannel/Localhost 0.25
331 TestNetworkPlugins/group/flannel/HairPin 0.16
332 TestNetworkPlugins/group/bridge/KubeletFlags 0.41
333 TestNetworkPlugins/group/bridge/NetCatPod 10.49
334 TestNetworkPlugins/group/bridge/DNS 0.28
335 TestNetworkPlugins/group/bridge/Localhost 0.2
336 TestNetworkPlugins/group/bridge/HairPin 0.2
338 TestStartStop/group/old-k8s-version/serial/FirstStart 182.43
340 TestStartStop/group/no-preload/serial/FirstStart 72.92
341 TestStartStop/group/no-preload/serial/DeployApp 9.35
342 TestStartStop/group/no-preload/serial/EnableAddonWhileActive 1.12
343 TestStartStop/group/no-preload/serial/Stop 12
344 TestStartStop/group/no-preload/serial/EnableAddonAfterStop 0.19
345 TestStartStop/group/no-preload/serial/SecondStart 289.05
346 TestStartStop/group/old-k8s-version/serial/DeployApp 7.54
347 TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive 1.14
348 TestStartStop/group/old-k8s-version/serial/Stop 12.11
349 TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop 0.19
351 TestStartStop/group/no-preload/serial/UserAppExistsAfterStop 6.01
352 TestStartStop/group/no-preload/serial/AddonExistsAfterStop 6.13
353 TestStartStop/group/no-preload/serial/VerifyKubernetesImages 0.25
354 TestStartStop/group/no-preload/serial/Pause 3.14
356 TestStartStop/group/embed-certs/serial/FirstStart 84.58
357 TestStartStop/group/embed-certs/serial/DeployApp 9.4
358 TestStartStop/group/embed-certs/serial/EnableAddonWhileActive 1.18
359 TestStartStop/group/embed-certs/serial/Stop 12.06
360 TestStartStop/group/embed-certs/serial/EnableAddonAfterStop 0.19
361 TestStartStop/group/embed-certs/serial/SecondStart 267.25
362 TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop 6.01
363 TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop 6.11
364 TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages 0.26
365 TestStartStop/group/old-k8s-version/serial/Pause 3.11
367 TestStartStop/group/default-k8s-diff-port/serial/FirstStart 65.06
368 TestStartStop/group/default-k8s-diff-port/serial/DeployApp 9.4
369 TestStartStop/group/default-k8s-diff-port/serial/EnableAddonWhileActive 1.14
370 TestStartStop/group/default-k8s-diff-port/serial/Stop 12.06
371 TestStartStop/group/default-k8s-diff-port/serial/EnableAddonAfterStop 0.19
372 TestStartStop/group/default-k8s-diff-port/serial/SecondStart 265.97
373 TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop 6.01
374 TestStartStop/group/embed-certs/serial/AddonExistsAfterStop 5.1
375 TestStartStop/group/embed-certs/serial/VerifyKubernetesImages 0.25
376 TestStartStop/group/embed-certs/serial/Pause 3.26
378 TestStartStop/group/newest-cni/serial/FirstStart 44.12
379 TestStartStop/group/newest-cni/serial/DeployApp 0
380 TestStartStop/group/newest-cni/serial/EnableAddonWhileActive 1.18
381 TestStartStop/group/newest-cni/serial/Stop 1.25
382 TestStartStop/group/newest-cni/serial/EnableAddonAfterStop 0.18
383 TestStartStop/group/newest-cni/serial/SecondStart 15.68
384 TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop 0
385 TestStartStop/group/newest-cni/serial/AddonExistsAfterStop 0
386 TestStartStop/group/newest-cni/serial/VerifyKubernetesImages 0.25
387 TestStartStop/group/newest-cni/serial/Pause 3
388 TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop 6.01
389 TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop 5.09
390 TestStartStop/group/default-k8s-diff-port/serial/VerifyKubernetesImages 0.25
391 TestStartStop/group/default-k8s-diff-port/serial/Pause 3.04
TestDownloadOnly/v1.20.0/json-events (8.52s)

=== RUN   TestDownloadOnly/v1.20.0/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-327632 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-327632 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (8.520770229s)
--- PASS: TestDownloadOnly/v1.20.0/json-events (8.52s)

TestDownloadOnly/v1.20.0/preload-exists (0s)

=== RUN   TestDownloadOnly/v1.20.0/preload-exists
--- PASS: TestDownloadOnly/v1.20.0/preload-exists (0.00s)

TestDownloadOnly/v1.20.0/LogsDuration (0.08s)

=== RUN   TestDownloadOnly/v1.20.0/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-327632
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-327632: exit status 85 (76.665806ms)

-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      | End Time |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| start   | -o=json --download-only        | download-only-327632 | jenkins | v1.33.1 | 04 Jul 24 01:07 UTC |          |
	|         | -p download-only-327632        |                      |         |         |                     |          |
	|         | --force --alsologtostderr      |                      |         |         |                     |          |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|         | --driver=docker                |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 01:07:54
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 01:07:54.313553 1195693 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:07:54.313750 1195693 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:07:54.313777 1195693 out.go:304] Setting ErrFile to fd 2...
	I0704 01:07:54.313795 1195693 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:07:54.314091 1195693 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	W0704 01:07:54.314291 1195693 root.go:314] Error reading config file at /home/jenkins/minikube-integration/18859-1190282/.minikube/config/config.json: open /home/jenkins/minikube-integration/18859-1190282/.minikube/config/config.json: no such file or directory
	I0704 01:07:54.314724 1195693 out.go:298] Setting JSON to true
	I0704 01:07:54.315680 1195693 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":24625,"bootTime":1720030650,"procs":174,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:07:54.315778 1195693 start.go:139] virtualization:  
	I0704 01:07:54.318562 1195693 out.go:97] [download-only-327632] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	W0704 01:07:54.318710 1195693 preload.go:294] Failed to list preload files: open /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball: no such file or directory
	I0704 01:07:54.318750 1195693 notify.go:220] Checking for updates...
	I0704 01:07:54.320700 1195693 out.go:169] MINIKUBE_LOCATION=18859
	I0704 01:07:54.322625 1195693 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:07:54.324416 1195693 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:07:54.326320 1195693 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:07:54.327910 1195693 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0704 01:07:54.331459 1195693 out.go:267] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0704 01:07:54.331757 1195693 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:07:54.351412 1195693 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:07:54.351531 1195693 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:07:54.410030 1195693 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 01:07:54.400835034 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:07:54.410150 1195693 docker.go:295] overlay module found
	I0704 01:07:54.412333 1195693 out.go:97] Using the docker driver based on user configuration
	I0704 01:07:54.412369 1195693 start.go:297] selected driver: docker
	I0704 01:07:54.412376 1195693 start.go:901] validating driver "docker" against <nil>
	I0704 01:07:54.412489 1195693 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:07:54.464072 1195693 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 01:07:54.455148114 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:07:54.464239 1195693 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0704 01:07:54.464533 1195693 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0704 01:07:54.464697 1195693 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0704 01:07:54.466980 1195693 out.go:169] Using Docker driver with root privileges
	I0704 01:07:54.468868 1195693 cni.go:84] Creating CNI manager for ""
	I0704 01:07:54.468893 1195693 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:07:54.468904 1195693 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0704 01:07:54.468983 1195693 start.go:340] cluster config:
	{Name:download-only-327632 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:download-only-327632 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:07:54.471123 1195693 out.go:97] Starting "download-only-327632" primary control-plane node in "download-only-327632" cluster
	I0704 01:07:54.471143 1195693 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 01:07:54.473314 1195693 out.go:97] Pulling base image v0.0.44-1719972989-19184 ...
	I0704 01:07:54.473340 1195693 preload.go:132] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0704 01:07:54.473521 1195693 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 01:07:54.487597 1195693 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:07:54.487766 1195693 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory
	I0704 01:07:54.487865 1195693 image.go:118] Writing gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:07:54.542052 1195693 preload.go:119] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0704 01:07:54.542078 1195693 cache.go:56] Caching tarball of preloaded images
	I0704 01:07:54.542247 1195693 preload.go:132] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0704 01:07:54.544485 1195693 out.go:97] Downloading Kubernetes v1.20.0 preload ...
	I0704 01:07:54.544516 1195693 preload.go:237] getting checksum for preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4 ...
	I0704 01:07:54.652780 1195693 download.go:107] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4?checksum=md5:7e3d48ccb9f143791669d02e14ce1643 -> /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	
	
	* The control-plane node download-only-327632 host does not exist
	  To start a cluster, run: "minikube start -p download-only-327632"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.20.0/LogsDuration (0.08s)
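
The preload download above carries its expected md5 in the URL's checksum parameter. For anyone re-running this outside the harness, a minimal sketch for verifying the cached tarball by hand (path and checksum copied from the log; variable names are illustrative):

	TARBALL=$HOME/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	EXPECTED=7e3d48ccb9f143791669d02e14ce1643
	# md5sum prints "<hash>  <file>"; compare the hash field only
	ACTUAL=$(md5sum "$TARBALL" | awk '{print $1}')
	[ "$ACTUAL" = "$EXPECTED" ] && echo "preload checksum OK" || echo "checksum mismatch: $ACTUAL"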

                                                
                                    
TestDownloadOnly/v1.20.0/DeleteAll (0.21s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.20.0/DeleteAll (0.21s)

                                                
                                    
TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-327632
--- PASS: TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

                                                
                                    
TestDownloadOnly/v1.30.2/json-events (7.09s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.30.2/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-526823 --force --alsologtostderr --kubernetes-version=v1.30.2 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-526823 --force --alsologtostderr --kubernetes-version=v1.30.2 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (7.092452758s)
--- PASS: TestDownloadOnly/v1.30.2/json-events (7.09s)

                                                
                                    
TestDownloadOnly/v1.30.2/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.30.2/preload-exists
--- PASS: TestDownloadOnly/v1.30.2/preload-exists (0.00s)

                                                
                                    
TestDownloadOnly/v1.30.2/LogsDuration (0.08s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.30.2/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-526823
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-526823: exit status 85 (78.973523ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only        | download-only-327632 | jenkins | v1.33.1 | 04 Jul 24 01:07 UTC |                     |
	|         | -p download-only-327632        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	| delete  | --all                          | minikube             | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| delete  | -p download-only-327632        | download-only-327632 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC | 04 Jul 24 01:08 UTC |
	| start   | -o=json --download-only        | download-only-526823 | jenkins | v1.33.1 | 04 Jul 24 01:08 UTC |                     |
	|         | -p download-only-526823        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.30.2   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/07/04 01:08:03
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.22.4 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0704 01:08:03.253199 1195892 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:08:03.253326 1195892 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:03.253336 1195892 out.go:304] Setting ErrFile to fd 2...
	I0704 01:08:03.253341 1195892 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:08:03.253591 1195892 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:08:03.253985 1195892 out.go:298] Setting JSON to true
	I0704 01:08:03.254846 1195892 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":24634,"bootTime":1720030650,"procs":172,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:08:03.254918 1195892 start.go:139] virtualization:  
	I0704 01:08:03.257499 1195892 out.go:97] [download-only-526823] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 01:08:03.257697 1195892 notify.go:220] Checking for updates...
	I0704 01:08:03.259489 1195892 out.go:169] MINIKUBE_LOCATION=18859
	I0704 01:08:03.261039 1195892 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:08:03.263076 1195892 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:08:03.265337 1195892 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:08:03.267116 1195892 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0704 01:08:03.270572 1195892 out.go:267] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0704 01:08:03.270814 1195892 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:08:03.291589 1195892 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:08:03.291702 1195892 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:03.362267 1195892 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:45 SystemTime:2024-07-04 01:08:03.352206231 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:03.362384 1195892 docker.go:295] overlay module found
	I0704 01:08:03.364547 1195892 out.go:97] Using the docker driver based on user configuration
	I0704 01:08:03.364574 1195892 start.go:297] selected driver: docker
	I0704 01:08:03.364581 1195892 start.go:901] validating driver "docker" against <nil>
	I0704 01:08:03.364695 1195892 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:08:03.417768 1195892 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:29 OomKillDisable:true NGoroutines:45 SystemTime:2024-07-04 01:08:03.409134237 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:08:03.417938 1195892 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0704 01:08:03.418216 1195892 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0704 01:08:03.418368 1195892 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0704 01:08:03.420396 1195892 out.go:169] Using Docker driver with root privileges
	I0704 01:08:03.422438 1195892 cni.go:84] Creating CNI manager for ""
	I0704 01:08:03.422459 1195892 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0704 01:08:03.422471 1195892 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0704 01:08:03.422544 1195892 start.go:340] cluster config:
	{Name:download-only-526823 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:download-only-526823 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:08:03.424614 1195892 out.go:97] Starting "download-only-526823" primary control-plane node in "download-only-526823" cluster
	I0704 01:08:03.424635 1195892 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0704 01:08:03.426947 1195892 out.go:97] Pulling base image v0.0.44-1719972989-19184 ...
	I0704 01:08:03.426977 1195892 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:03.427137 1195892 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local docker daemon
	I0704 01:08:03.441973 1195892 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 to local cache
	I0704 01:08:03.442093 1195892 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory
	I0704 01:08:03.442118 1195892 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 in local cache directory, skipping pull
	I0704 01:08:03.442127 1195892 image.go:105] gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 exists in cache, skipping pull
	I0704 01:08:03.442134 1195892 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 as a tarball
	I0704 01:08:03.485785 1195892 preload.go:119] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.30.2/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	I0704 01:08:03.485811 1195892 cache.go:56] Caching tarball of preloaded images
	I0704 01:08:03.485981 1195892 preload.go:132] Checking if preload exists for k8s version v1.30.2 and runtime containerd
	I0704 01:08:03.489023 1195892 out.go:97] Downloading Kubernetes v1.30.2 preload ...
	I0704 01:08:03.489052 1195892 preload.go:237] getting checksum for preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4 ...
	I0704 01:08:03.602079 1195892 download.go:107] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.30.2/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4?checksum=md5:5f38272c206cc90312ddc23a9bcf8a1f -> /home/jenkins/minikube-integration/18859-1190282/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.30.2-containerd-overlay2-arm64.tar.lz4
	
	
	* The control-plane node download-only-526823 host does not exist
	  To start a cluster, run: "minikube start -p download-only-526823"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.30.2/LogsDuration (0.08s)

                                                
                                    
TestDownloadOnly/v1.30.2/DeleteAll (0.2s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.30.2/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.30.2/DeleteAll (0.20s)

                                                
                                    
TestDownloadOnly/v1.30.2/DeleteAlwaysSucceeds (0.13s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.30.2/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-526823
--- PASS: TestDownloadOnly/v1.30.2/DeleteAlwaysSucceeds (0.13s)

                                                
                                    
TestBinaryMirror (0.55s)

                                                
                                                
=== RUN   TestBinaryMirror
aaa_download_only_test.go:314: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p binary-mirror-236711 --alsologtostderr --binary-mirror http://127.0.0.1:35737 --driver=docker  --container-runtime=containerd
helpers_test.go:175: Cleaning up "binary-mirror-236711" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p binary-mirror-236711
--- PASS: TestBinaryMirror (0.55s)
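
TestBinaryMirror points minikube's binary downloads at a local HTTP mirror. A rough reproduction of the flow, assuming ./mirror mimics the upstream bucket layout (an assumption; the test wires up its own in-process server on the port shown above):

	# serve a local mirror, then start with --download-only against it
	python3 -m http.server 35737 --directory ./mirror &
	out/minikube-linux-arm64 start --download-only -p binary-mirror-demo \
	  --binary-mirror http://127.0.0.1:35737 --driver=docker --container-runtime=containerd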

                                                
                                    
TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.06s)

                                                
                                                
=== RUN   TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/EnablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
addons_test.go:1029: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-155517
addons_test.go:1029: (dbg) Non-zero exit: out/minikube-linux-arm64 addons enable dashboard -p addons-155517: exit status 85 (63.913097ms)

                                                
                                                
-- stdout --
	* Profile "addons-155517" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-155517"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.06s)

                                                
                                    
TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

                                                
                                                
=== RUN   TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/DisablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
addons_test.go:1040: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-155517
addons_test.go:1040: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable dashboard -p addons-155517: exit status 85 (68.684795ms)

                                                
                                                
-- stdout --
	* Profile "addons-155517" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-155517"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

                                                
                                    
TestAddons/Setup (160.98s)

                                                
                                                
=== RUN   TestAddons/Setup
addons_test.go:110: (dbg) Run:  out/minikube-linux-arm64 start -p addons-155517 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns
addons_test.go:110: (dbg) Done: out/minikube-linux-arm64 start -p addons-155517 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns: (2m40.982714721s)
--- PASS: TestAddons/Setup (160.98s)
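
All of the parallel addon subtests below run against this one addons-155517 profile. To confirm which addons the start line actually enabled, the standard subcommand (not part of the test) is:

	out/minikube-linux-arm64 -p addons-155517 addons list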

                                                
                                    
TestAddons/parallel/Registry (15.66s)

                                                
                                                
=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Registry
addons_test.go:332: registry stabilized in 50.079809ms
addons_test.go:334: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-dm5v6" [15499167-b529-4f01-b177-75b6be18e2b5] Running
addons_test.go:334: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 6.01093802s
addons_test.go:337: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-proxy-fndt4" [36d06042-3c9e-400c-a673-4f8f17e23b46] Running
addons_test.go:337: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 5.011911324s
addons_test.go:342: (dbg) Run:  kubectl --context addons-155517 delete po -l run=registry-test --now
addons_test.go:347: (dbg) Run:  kubectl --context addons-155517 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"
addons_test.go:347: (dbg) Done: kubectl --context addons-155517 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": (3.547510257s)
addons_test.go:361: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 ip
2024/07/04 01:11:08 [DEBUG] GET http://192.168.49.2:5000
addons_test.go:390: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable registry --alsologtostderr -v=1
--- PASS: TestAddons/parallel/Registry (15.66s)
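
The probe at addons_test.go:347 is just an in-cluster wget against the registry Service. Stripped of the test plumbing, the same check (command copied from the log) is:

	kubectl --context addons-155517 run --rm registry-test --restart=Never \
	  --image=gcr.io/k8s-minikube/busybox -it -- \
	  sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"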

                                                
                                    
TestAddons/parallel/MetricsServer (6.91s)

                                                
                                                
=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:409: metrics-server stabilized in 2.688131ms
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...
helpers_test.go:344: "metrics-server-c59844bb4-csqts" [d63007ff-883d-4b4c-b4e4-b83cf3f5e613] Running
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 6.00471033s
addons_test.go:417: (dbg) Run:  kubectl --context addons-155517 top pods -n kube-system
addons_test.go:434: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable metrics-server --alsologtostderr -v=1
--- PASS: TestAddons/parallel/MetricsServer (6.91s)
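
The pass condition at addons_test.go:417 is simply that kubectl top returns data once the metrics-server pod is healthy; the manual equivalent:

	kubectl --context addons-155517 top pods -n kube-system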

                                                
                                    
TestAddons/parallel/CSI (34.69s)

                                                
                                                
=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CSI
addons_test.go:563: csi-hostpath-driver pods stabilized in 6.754867ms
addons_test.go:566: (dbg) Run:  kubectl --context addons-155517 create -f testdata/csi-hostpath-driver/pvc.yaml
addons_test.go:571: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc -o jsonpath={.status.phase} -n default
addons_test.go:576: (dbg) Run:  kubectl --context addons-155517 create -f testdata/csi-hostpath-driver/pv-pod.yaml
addons_test.go:581: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod" in namespace "default" ...
helpers_test.go:344: "task-pv-pod" [d11c86ea-a1c3-4d6d-a3c1-eaaf147df233] Pending
helpers_test.go:344: "task-pv-pod" [d11c86ea-a1c3-4d6d-a3c1-eaaf147df233] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod" [d11c86ea-a1c3-4d6d-a3c1-eaaf147df233] Running
addons_test.go:581: (dbg) TestAddons/parallel/CSI: app=task-pv-pod healthy within 12.003944807s
addons_test.go:586: (dbg) Run:  kubectl --context addons-155517 create -f testdata/csi-hostpath-driver/snapshot.yaml
addons_test.go:591: (dbg) TestAddons/parallel/CSI: waiting 6m0s for volume snapshot "new-snapshot-demo" in namespace "default" ...
helpers_test.go:419: (dbg) Run:  kubectl --context addons-155517 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
helpers_test.go:419: (dbg) Run:  kubectl --context addons-155517 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
addons_test.go:596: (dbg) Run:  kubectl --context addons-155517 delete pod task-pv-pod
addons_test.go:602: (dbg) Run:  kubectl --context addons-155517 delete pvc hpvc
addons_test.go:608: (dbg) Run:  kubectl --context addons-155517 create -f testdata/csi-hostpath-driver/pvc-restore.yaml
addons_test.go:613: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc-restore" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
addons_test.go:618: (dbg) Run:  kubectl --context addons-155517 create -f testdata/csi-hostpath-driver/pv-pod-restore.yaml
addons_test.go:623: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod-restore" in namespace "default" ...
helpers_test.go:344: "task-pv-pod-restore" [7246ce07-62b4-40db-bc53-7dec47d8c4d0] Pending
helpers_test.go:344: "task-pv-pod-restore" [7246ce07-62b4-40db-bc53-7dec47d8c4d0] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod-restore" [7246ce07-62b4-40db-bc53-7dec47d8c4d0] Running
addons_test.go:623: (dbg) TestAddons/parallel/CSI: app=task-pv-pod-restore healthy within 7.003958036s
addons_test.go:628: (dbg) Run:  kubectl --context addons-155517 delete pod task-pv-pod-restore
addons_test.go:632: (dbg) Run:  kubectl --context addons-155517 delete pvc hpvc-restore
addons_test.go:636: (dbg) Run:  kubectl --context addons-155517 delete volumesnapshot new-snapshot-demo
addons_test.go:640: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable csi-hostpath-driver --alsologtostderr -v=1
addons_test.go:640: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 addons disable csi-hostpath-driver --alsologtostderr -v=1: (6.964632108s)
addons_test.go:644: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable volumesnapshots --alsologtostderr -v=1
--- PASS: TestAddons/parallel/CSI (34.69s)
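
Each helpers_test.go:394 line above is one iteration of a poll on the PVC phase via jsonpath. A minimal shell sketch of that wait loop (names taken from the log; the 2s interval is illustrative):

	# poll until the hpvc claim reports Bound
	until [ "$(kubectl --context addons-155517 get pvc hpvc -n default -o jsonpath='{.status.phase}')" = "Bound" ]; do
	  sleep 2
	done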

                                                
                                    
TestAddons/parallel/Headlamp (11.07s)

                                                
                                                
=== RUN   TestAddons/parallel/Headlamp
=== PAUSE TestAddons/parallel/Headlamp

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Headlamp
addons_test.go:826: (dbg) Run:  out/minikube-linux-arm64 addons enable headlamp -p addons-155517 --alsologtostderr -v=1
addons_test.go:826: (dbg) Done: out/minikube-linux-arm64 addons enable headlamp -p addons-155517 --alsologtostderr -v=1: (1.069941619s)
addons_test.go:831: (dbg) TestAddons/parallel/Headlamp: waiting 8m0s for pods matching "app.kubernetes.io/name=headlamp" in namespace "headlamp" ...
helpers_test.go:344: "headlamp-7867546754-xmxdd" [5a068e2e-8bdd-4e43-854a-8518aee06844] Pending / Ready:ContainersNotReady (containers with unready status: [headlamp]) / ContainersReady:ContainersNotReady (containers with unready status: [headlamp])
helpers_test.go:344: "headlamp-7867546754-xmxdd" [5a068e2e-8bdd-4e43-854a-8518aee06844] Running
addons_test.go:831: (dbg) TestAddons/parallel/Headlamp: app.kubernetes.io/name=headlamp healthy within 10.003574475s
--- PASS: TestAddons/parallel/Headlamp (11.07s)

                                                
                                    
TestAddons/parallel/CloudSpanner (5.6s)

                                                
                                                
=== RUN   TestAddons/parallel/CloudSpanner
=== PAUSE TestAddons/parallel/CloudSpanner

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CloudSpanner
addons_test.go:859: (dbg) TestAddons/parallel/CloudSpanner: waiting 6m0s for pods matching "app=cloud-spanner-emulator" in namespace "default" ...
helpers_test.go:344: "cloud-spanner-emulator-6fcd4f6f98-lmlch" [17b843e5-9e47-4c84-b09f-15250c17c3e7] Running
addons_test.go:859: (dbg) TestAddons/parallel/CloudSpanner: app=cloud-spanner-emulator healthy within 5.004088362s
addons_test.go:862: (dbg) Run:  out/minikube-linux-arm64 addons disable cloud-spanner -p addons-155517
--- PASS: TestAddons/parallel/CloudSpanner (5.60s)

                                                
                                    
TestAddons/parallel/LocalPath (52.75s)

                                                
                                                
=== RUN   TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/LocalPath
addons_test.go:974: (dbg) Run:  kubectl --context addons-155517 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:980: (dbg) Run:  kubectl --context addons-155517 apply -f testdata/storage-provisioner-rancher/pod.yaml
addons_test.go:984: (dbg) TestAddons/parallel/LocalPath: waiting 5m0s for pvc "test-pvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o jsonpath={.status.phase} -n default
addons_test.go:987: (dbg) TestAddons/parallel/LocalPath: waiting 3m0s for pods matching "run=test-local-path" in namespace "default" ...
helpers_test.go:344: "test-local-path" [e3e742fd-ae70-49c9-af74-57e2aed8515a] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "test-local-path" [e3e742fd-ae70-49c9-af74-57e2aed8515a] Pending / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
helpers_test.go:344: "test-local-path" [e3e742fd-ae70-49c9-af74-57e2aed8515a] Succeeded / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
addons_test.go:987: (dbg) TestAddons/parallel/LocalPath: run=test-local-path healthy within 4.003493672s
addons_test.go:992: (dbg) Run:  kubectl --context addons-155517 get pvc test-pvc -o=json
addons_test.go:1001: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 ssh "cat /opt/local-path-provisioner/pvc-c05b7d52-4c97-4ba9-8a04-478d46aaf85d_default_test-pvc/file1"
addons_test.go:1013: (dbg) Run:  kubectl --context addons-155517 delete pod test-local-path
addons_test.go:1017: (dbg) Run:  kubectl --context addons-155517 delete pvc test-pvc
addons_test.go:1021: (dbg) Run:  out/minikube-linux-arm64 -p addons-155517 addons disable storage-provisioner-rancher --alsologtostderr -v=1
addons_test.go:1021: (dbg) Done: out/minikube-linux-arm64 -p addons-155517 addons disable storage-provisioner-rancher --alsologtostderr -v=1: (43.565971181s)
--- PASS: TestAddons/parallel/LocalPath (52.75s)

                                                
                                    
TestAddons/parallel/NvidiaDevicePlugin (5.56s)

                                                
                                                
=== RUN   TestAddons/parallel/NvidiaDevicePlugin
=== PAUSE TestAddons/parallel/NvidiaDevicePlugin

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/NvidiaDevicePlugin
addons_test.go:1053: (dbg) TestAddons/parallel/NvidiaDevicePlugin: waiting 6m0s for pods matching "name=nvidia-device-plugin-ds" in namespace "kube-system" ...
helpers_test.go:344: "nvidia-device-plugin-daemonset-gr25g" [8a2a5166-3ff2-4a7c-8665-62fade8bd24f] Running
addons_test.go:1053: (dbg) TestAddons/parallel/NvidiaDevicePlugin: name=nvidia-device-plugin-ds healthy within 5.004541143s
addons_test.go:1056: (dbg) Run:  out/minikube-linux-arm64 addons disable nvidia-device-plugin -p addons-155517
--- PASS: TestAddons/parallel/NvidiaDevicePlugin (5.56s)

                                                
                                    
TestAddons/parallel/Yakd (5.01s)

                                                
                                                
=== RUN   TestAddons/parallel/Yakd
=== PAUSE TestAddons/parallel/Yakd

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Yakd
addons_test.go:1064: (dbg) TestAddons/parallel/Yakd: waiting 2m0s for pods matching "app.kubernetes.io/name=yakd-dashboard" in namespace "yakd-dashboard" ...
helpers_test.go:344: "yakd-dashboard-799879c74f-gdj2b" [37d4b311-cf51-49f7-b53a-e40952b05d4a] Running
addons_test.go:1064: (dbg) TestAddons/parallel/Yakd: app.kubernetes.io/name=yakd-dashboard healthy within 5.004111918s
--- PASS: TestAddons/parallel/Yakd (5.01s)

                                                
                                    
TestCertOptions (35.28s)

                                                
                                                
=== RUN   TestCertOptions
=== PAUSE TestCertOptions

                                                
                                                

                                                
                                                
=== CONT  TestCertOptions
cert_options_test.go:49: (dbg) Run:  out/minikube-linux-arm64 start -p cert-options-196421 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd
cert_options_test.go:49: (dbg) Done: out/minikube-linux-arm64 start -p cert-options-196421 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd: (32.674330854s)
cert_options_test.go:60: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-196421 ssh "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt"
cert_options_test.go:88: (dbg) Run:  kubectl --context cert-options-196421 config view
cert_options_test.go:100: (dbg) Run:  out/minikube-linux-arm64 ssh -p cert-options-196421 -- "sudo cat /etc/kubernetes/admin.conf"
helpers_test.go:175: Cleaning up "cert-options-196421" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-options-196421
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-options-196421: (1.98059806s)
--- PASS: TestCertOptions (35.28s)
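
The openssl call at cert_options_test.go:60 is how the test asserts the extra --apiserver-ips/--apiserver-names landed in the certificate. To eyeball the SANs directly (the grep pattern is illustrative):

	out/minikube-linux-arm64 -p cert-options-196421 ssh \
	  "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt" \
	  | grep -E 'DNS:|IP Address:'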

                                                
                                    
TestCertExpiration (229.58s)

                                                
                                                
=== RUN   TestCertExpiration
=== PAUSE TestCertExpiration

                                                
                                                

                                                
                                                
=== CONT  TestCertExpiration
cert_options_test.go:123: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-913882 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd
cert_options_test.go:123: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-913882 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd: (39.55802607s)
cert_options_test.go:131: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-913882 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd
cert_options_test.go:131: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-913882 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd: (7.67351143s)
helpers_test.go:175: Cleaning up "cert-expiration-913882" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-expiration-913882
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-expiration-913882: (2.343455966s)
--- PASS: TestCertExpiration (229.58s)
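
The two start invocations are the whole mechanism: issue certs with a 3m lifetime, wait out the expiry, then restart with --cert-expiration=8760h so minikube reissues them. A condensed sketch of that flow (profile name illustrative; the test waits the full 3m between starts):

	out/minikube-linux-arm64 start -p cert-demo --memory=2048 --cert-expiration=3m \
	  --driver=docker --container-runtime=containerd
	sleep 180   # let the short-lived certs expire
	out/minikube-linux-arm64 start -p cert-demo --memory=2048 --cert-expiration=8760h \
	  --driver=docker --container-runtime=containerd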

                                                
                                    
TestForceSystemdFlag (40.96s)

                                                
                                                
=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdFlag
docker_test.go:91: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-flag-335395 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:91: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-flag-335395 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (38.281357418s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-flag-335395 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-flag-335395" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-flag-335395
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-flag-335395: (2.308809372s)
--- PASS: TestForceSystemdFlag (40.96s)
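
The config.toml read at docker_test.go:121 checks that --force-systemd switched containerd to the systemd cgroup driver. A manual equivalent (SystemdCgroup is the standard containerd key; it is assumed here, not shown in the log):

	out/minikube-linux-arm64 -p force-systemd-flag-335395 ssh \
	  "cat /etc/containerd/config.toml" | grep SystemdCgroup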

                                                
                                    
TestForceSystemdEnv (38.19s)

                                                
                                                
=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdEnv
docker_test.go:155: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-env-942171 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:155: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-env-942171 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (34.977513144s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-env-942171 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-env-942171" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-env-942171
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-env-942171: (2.759520916s)
--- PASS: TestForceSystemdEnv (38.19s)
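
Same assertion as TestForceSystemdFlag, but driven by environment rather than flag; note the start line above carries no --force-systemd. A sketch of the env-var variant (MINIKUBE_FORCE_SYSTEMD is the variable this test is understood to exercise; profile name illustrative):

	MINIKUBE_FORCE_SYSTEMD=true out/minikube-linux-arm64 start -p force-systemd-env-demo \
	  --memory=2048 --driver=docker --container-runtime=containerd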

                                                
                                    
TestDockerEnvContainerd (46.89s)

                                                
                                                
=== RUN   TestDockerEnvContainerd
docker_test.go:170: running with containerd true linux arm64
docker_test.go:181: (dbg) Run:  out/minikube-linux-arm64 start -p dockerenv-767041 --driver=docker  --container-runtime=containerd
docker_test.go:181: (dbg) Done: out/minikube-linux-arm64 start -p dockerenv-767041 --driver=docker  --container-runtime=containerd: (30.713303073s)
docker_test.go:189: (dbg) Run:  /bin/bash -c "out/minikube-linux-arm64 docker-env --ssh-host --ssh-add -p dockerenv-767041"
docker_test.go:189: (dbg) Done: /bin/bash -c "out/minikube-linux-arm64 docker-env --ssh-host --ssh-add -p dockerenv-767041": (1.303789325s)
docker_test.go:220: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-xovntPwxq32u/agent.1225113" SSH_AGENT_PID="1225114" DOCKER_HOST=ssh://docker@127.0.0.1:33946 docker version"
docker_test.go:243: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-xovntPwxq32u/agent.1225113" SSH_AGENT_PID="1225114" DOCKER_HOST=ssh://docker@127.0.0.1:33946 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env"
docker_test.go:243: (dbg) Done: /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-xovntPwxq32u/agent.1225113" SSH_AGENT_PID="1225114" DOCKER_HOST=ssh://docker@127.0.0.1:33946 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env": (1.403473473s)
docker_test.go:250: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-xovntPwxq32u/agent.1225113" SSH_AGENT_PID="1225114" DOCKER_HOST=ssh://docker@127.0.0.1:33946 docker image ls"
helpers_test.go:175: Cleaning up "dockerenv-767041" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p dockerenv-767041
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p dockerenv-767041: (1.991509507s)
--- PASS: TestDockerEnvContainerd (46.89s)

TestErrorSpam/setup (33.7s)
=== RUN   TestErrorSpam/setup
error_spam_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -p nospam-273733 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-273733 --driver=docker  --container-runtime=containerd
error_spam_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -p nospam-273733 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-273733 --driver=docker  --container-runtime=containerd: (33.701957505s)
--- PASS: TestErrorSpam/setup (33.70s)

TestErrorSpam/start (0.7s)
=== RUN   TestErrorSpam/start
error_spam_test.go:216: Cleaning up 1 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 start --dry-run
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 start --dry-run
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 start --dry-run
--- PASS: TestErrorSpam/start (0.70s)

TestErrorSpam/status (1.01s)
=== RUN   TestErrorSpam/status
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 status
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 status
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 status
--- PASS: TestErrorSpam/status (1.01s)

TestErrorSpam/pause (1.76s)
=== RUN   TestErrorSpam/pause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 pause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 pause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 pause
--- PASS: TestErrorSpam/pause (1.76s)

TestErrorSpam/unpause (1.74s)
=== RUN   TestErrorSpam/unpause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 unpause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 unpause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 unpause
--- PASS: TestErrorSpam/unpause (1.74s)

TestErrorSpam/stop (1.41s)
=== RUN   TestErrorSpam/stop
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 stop
error_spam_test.go:159: (dbg) Done: out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 stop: (1.231676265s)
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 stop
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-273733 --log_dir /tmp/nospam-273733 stop
--- PASS: TestErrorSpam/stop (1.41s)

TestFunctional/serial/CopySyncFile (0s)
=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1851: local sync path: /home/jenkins/minikube-integration/18859-1190282/.minikube/files/etc/test/nested/copy/1195688/hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.00s)

TestFunctional/serial/StartWithProxy (58.07s)
=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:2230: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd
functional_test.go:2230: (dbg) Done: out/minikube-linux-arm64 start -p functional-781779 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd: (58.069945242s)
--- PASS: TestFunctional/serial/StartWithProxy (58.07s)

TestFunctional/serial/AuditLog (0s)
=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

TestFunctional/serial/SoftStart (5.82s)
=== RUN   TestFunctional/serial/SoftStart
functional_test.go:655: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --alsologtostderr -v=8
E0704 01:50:53.143956 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.150456 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.160916 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.181280 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.221644 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.302377 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:53.462596 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
functional_test.go:655: (dbg) Done: out/minikube-linux-arm64 start -p functional-781779 --alsologtostderr -v=8: (5.815482243s)
functional_test.go:659: soft start took 5.818489135s for "functional-781779" cluster.
--- PASS: TestFunctional/serial/SoftStart (5.82s)

TestFunctional/serial/KubeContext (0.06s)
=== RUN   TestFunctional/serial/KubeContext
functional_test.go:677: (dbg) Run:  kubectl config current-context
--- PASS: TestFunctional/serial/KubeContext (0.06s)

TestFunctional/serial/KubectlGetPods (0.11s)
=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:692: (dbg) Run:  kubectl --context functional-781779 get po -A
--- PASS: TestFunctional/serial/KubectlGetPods (0.11s)

TestFunctional/serial/CacheCmd/cache/add_remote (4.51s)
=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:1045: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:3.1
E0704 01:50:53.783142 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:50:54.423374 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
functional_test.go:1045: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:3.1: (1.605382478s)
functional_test.go:1045: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:3.3
E0704 01:50:55.704098 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
functional_test.go:1045: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:3.3: (1.505942314s)
functional_test.go:1045: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:latest
functional_test.go:1045: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 cache add registry.k8s.io/pause:latest: (1.399292347s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (4.51s)

TestFunctional/serial/CacheCmd/cache/add_local (1.31s)
=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1073: (dbg) Run:  docker build -t minikube-local-cache-test:functional-781779 /tmp/TestFunctionalserialCacheCmdcacheadd_local2869601809/001
E0704 01:50:58.264469 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
functional_test.go:1085: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache add minikube-local-cache-test:functional-781779
functional_test.go:1090: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache delete minikube-local-cache-test:functional-781779
functional_test.go:1079: (dbg) Run:  docker rmi minikube-local-cache-test:functional-781779
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (1.31s)

TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)
=== RUN   TestFunctional/serial/CacheCmd/cache/CacheDelete
functional_test.go:1098: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)

TestFunctional/serial/CacheCmd/cache/list (0.06s)
=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1106: (dbg) Run:  out/minikube-linux-arm64 cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.06s)

TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.3s)
=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1120: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh sudo crictl images
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.30s)

TestFunctional/serial/CacheCmd/cache/cache_reload (2.26s)
=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1143: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh sudo crictl rmi registry.k8s.io/pause:latest
functional_test.go:1149: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1149: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh sudo crictl inspecti registry.k8s.io/pause:latest: exit status 1 (288.017823ms)

-- stdout --
	FATA[0000] no such image "registry.k8s.io/pause:latest" present 

-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test.go:1154: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cache reload
functional_test.go:1154: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 cache reload: (1.173056916s)
functional_test.go:1159: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh sudo crictl inspecti registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (2.26s)

TestFunctional/serial/CacheCmd/cache/delete (0.11s)
=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1168: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.1
functional_test.go:1168: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.11s)

TestFunctional/serial/MinikubeKubectlCmd (0.14s)
=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:712: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 kubectl -- --context functional-781779 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.14s)

TestFunctional/serial/MinikubeKubectlCmdDirectly (0.13s)
=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:737: (dbg) Run:  out/kubectl --context functional-781779 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (0.13s)

TestFunctional/serial/ExtraConfig (39.48s)
=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:753: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
E0704 01:51:03.385047 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:51:13.625245 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 01:51:34.105794 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
functional_test.go:753: (dbg) Done: out/minikube-linux-arm64 start -p functional-781779 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: (39.479680818s)
functional_test.go:757: restart took 39.479796697s for "functional-781779" cluster.
--- PASS: TestFunctional/serial/ExtraConfig (39.48s)

TestFunctional/serial/ComponentHealth (0.13s)
=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:806: (dbg) Run:  kubectl --context functional-781779 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:821: etcd phase: Running
functional_test.go:831: etcd status: Ready
functional_test.go:821: kube-apiserver phase: Running
functional_test.go:831: kube-apiserver status: Ready
functional_test.go:821: kube-controller-manager phase: Running
functional_test.go:831: kube-controller-manager status: Ready
functional_test.go:821: kube-scheduler phase: Running
functional_test.go:831: kube-scheduler status: Ready
--- PASS: TestFunctional/serial/ComponentHealth (0.13s)

TestFunctional/serial/LogsCmd (1.84s)
=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1232: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 logs
functional_test.go:1232: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 logs: (1.842748939s)
--- PASS: TestFunctional/serial/LogsCmd (1.84s)

TestFunctional/serial/LogsFileCmd (1.98s)
=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1246: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 logs --file /tmp/TestFunctionalserialLogsFileCmd1364003917/001/logs.txt
functional_test.go:1246: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 logs --file /tmp/TestFunctionalserialLogsFileCmd1364003917/001/logs.txt: (1.980204802s)
--- PASS: TestFunctional/serial/LogsFileCmd (1.98s)

TestFunctional/serial/InvalidService (3.78s)
=== RUN   TestFunctional/serial/InvalidService
functional_test.go:2317: (dbg) Run:  kubectl --context functional-781779 apply -f testdata/invalidsvc.yaml
functional_test.go:2331: (dbg) Run:  out/minikube-linux-arm64 service invalid-svc -p functional-781779
functional_test.go:2331: (dbg) Non-zero exit: out/minikube-linux-arm64 service invalid-svc -p functional-781779: exit status 115 (431.879992ms)

-- stdout --
	|-----------|-------------|-------------|---------------------------|
	| NAMESPACE |    NAME     | TARGET PORT |            URL            |
	|-----------|-------------|-------------|---------------------------|
	| default   | invalid-svc |          80 | http://192.168.49.2:30665 |
	|-----------|-------------|-------------|---------------------------|
	
	

-- /stdout --
** stderr ** 
	X Exiting due to SVC_UNREACHABLE: service not available: no running pod for service invalid-svc found
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_service_96b204199e3191fa1740d4430b018a3c8028d52d_0.log                 │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

** /stderr **
functional_test.go:2323: (dbg) Run:  kubectl --context functional-781779 delete -f testdata/invalidsvc.yaml
--- PASS: TestFunctional/serial/InvalidService (3.78s)

TestFunctional/parallel/ConfigCmd (0.47s)
=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd

=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config unset cpus
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config get cpus
functional_test.go:1195: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 config get cpus: exit status 14 (74.899522ms)

** stderr ** 
	Error: specified key could not be found in config

** /stderr **
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config set cpus 2
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config get cpus
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config unset cpus
functional_test.go:1195: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 config get cpus
functional_test.go:1195: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 config get cpus: exit status 14 (65.476228ms)

** stderr ** 
	Error: specified key could not be found in config

** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (0.47s)

TestFunctional/parallel/DashboardCmd (8.82s)
=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd

=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:901: (dbg) daemon: [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-781779 --alsologtostderr -v=1]
functional_test.go:906: (dbg) stopping [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-781779 --alsologtostderr -v=1] ...
helpers_test.go:508: unable to kill pid 1241220: os: process already finished
--- PASS: TestFunctional/parallel/DashboardCmd (8.82s)

TestFunctional/parallel/DryRun (0.9s)
=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun

=== CONT  TestFunctional/parallel/DryRun
functional_test.go:970: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:970: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-781779 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (552.487424ms)

-- stdout --
	* [functional-781779] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on existing profile
	
	

-- /stdout --
** stderr ** 
	I0704 01:52:26.912909 1240496 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:52:26.913160 1240496 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:26.913203 1240496 out.go:304] Setting ErrFile to fd 2...
	I0704 01:52:26.913224 1240496 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:26.913511 1240496 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:52:26.914111 1240496 out.go:298] Setting JSON to false
	I0704 01:52:26.915292 1240496 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":27297,"bootTime":1720030650,"procs":222,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:52:26.915434 1240496 start.go:139] virtualization:  
	I0704 01:52:26.922092 1240496 out.go:177] * [functional-781779] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 01:52:26.925245 1240496 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 01:52:26.925334 1240496 notify.go:220] Checking for updates...
	I0704 01:52:26.932013 1240496 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:52:26.934361 1240496 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:52:26.936045 1240496 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:52:26.937824 1240496 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 01:52:26.939591 1240496 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 01:52:26.942881 1240496 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:52:26.943771 1240496 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:52:27.022733 1240496 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:52:27.022872 1240496 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:52:27.228424 1240496 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:33 OomKillDisable:true NGoroutines:53 SystemTime:2024-07-04 01:52:27.208405466 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:52:27.228536 1240496 docker.go:295] overlay module found
	I0704 01:52:27.232151 1240496 out.go:177] * Using the docker driver based on existing profile
	I0704 01:52:27.234114 1240496 start.go:297] selected driver: docker
	I0704 01:52:27.234135 1240496 start.go:901] validating driver "docker" against &{Name:functional-781779 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:functional-781779 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:52:27.234248 1240496 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 01:52:27.236649 1240496 out.go:177] 
	W0704 01:52:27.238737 1240496 out.go:239] X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	I0704 01:52:27.240896 1240496 out.go:177] 

** /stderr **
functional_test.go:987: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --dry-run --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
--- PASS: TestFunctional/parallel/DryRun (0.90s)

TestFunctional/parallel/InternationalLanguage (0.37s)
=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage

=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:1016: (dbg) Run:  out/minikube-linux-arm64 start -p functional-781779 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:1016: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-781779 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (370.705976ms)

-- stdout --
	* [functional-781779] minikube v1.33.1 sur Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Utilisation du pilote docker basé sur le profil existant
	
	

-- /stdout --
** stderr ** 
	I0704 01:52:26.394079 1240401 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:52:26.394279 1240401 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:26.394293 1240401 out.go:304] Setting ErrFile to fd 2...
	I0704 01:52:26.394300 1240401 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:52:26.394743 1240401 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:52:26.395192 1240401 out.go:298] Setting JSON to false
	I0704 01:52:26.396237 1240401 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":27297,"bootTime":1720030650,"procs":211,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 01:52:26.396313 1240401 start.go:139] virtualization:  
	I0704 01:52:26.399664 1240401 out.go:177] * [functional-781779] minikube v1.33.1 sur Ubuntu 20.04 (arm64)
	I0704 01:52:26.401954 1240401 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 01:52:26.402082 1240401 notify.go:220] Checking for updates...
	I0704 01:52:26.411435 1240401 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 01:52:26.413221 1240401 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 01:52:26.415460 1240401 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 01:52:26.417177 1240401 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 01:52:26.419046 1240401 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 01:52:26.421257 1240401 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:52:26.421821 1240401 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 01:52:26.475877 1240401 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 01:52:26.475976 1240401 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:52:26.677005 1240401 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:2 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:33 OomKillDisable:true NGoroutines:53 SystemTime:2024-07-04 01:52:26.666388854 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:52:26.677114 1240401 docker.go:295] overlay module found
	I0704 01:52:26.679688 1240401 out.go:177] * Utilisation du pilote docker basé sur le profil existant
	I0704 01:52:26.681450 1240401 start.go:297] selected driver: docker
	I0704 01:52:26.681473 1240401 start.go:901] validating driver "docker" against &{Name:functional-781779 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.44-1719972989-19184@sha256:86cb76941aa00fc70e665895234bda20991d5563e39b8ff07212e31a82ce7fb1 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.30.2 ClusterName:functional-781779 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.30.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0704 01:52:26.681591 1240401 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 01:52:26.684262 1240401 out.go:177] 
	W0704 01:52:26.686166 1240401 out.go:239] X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	I0704 01:52:26.689109 1240401 out.go:177] 

** /stderr **
--- PASS: TestFunctional/parallel/InternationalLanguage (0.37s)

TestFunctional/parallel/StatusCmd (1.39s)
=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd

=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:850: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 status
functional_test.go:856: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:868: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 status -o json
--- PASS: TestFunctional/parallel/StatusCmd (1.39s)

TestFunctional/parallel/ServiceCmdConnect (9.59s)
=== RUN   TestFunctional/parallel/ServiceCmdConnect
=== PAUSE TestFunctional/parallel/ServiceCmdConnect

=== CONT  TestFunctional/parallel/ServiceCmdConnect
functional_test.go:1623: (dbg) Run:  kubectl --context functional-781779 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1631: (dbg) Run:  kubectl --context functional-781779 expose deployment hello-node-connect --type=NodePort --port=8080
functional_test.go:1636: (dbg) TestFunctional/parallel/ServiceCmdConnect: waiting 10m0s for pods matching "app=hello-node-connect" in namespace "default" ...
helpers_test.go:344: "hello-node-connect-6f49f58cd5-nj7kw" [aa7709e2-d708-4925-b017-f04cf02dc9e2] Pending / Ready:ContainersNotReady (containers with unready status: [echoserver-arm]) / ContainersReady:ContainersNotReady (containers with unready status: [echoserver-arm])
helpers_test.go:344: "hello-node-connect-6f49f58cd5-nj7kw" [aa7709e2-d708-4925-b017-f04cf02dc9e2] Running
functional_test.go:1636: (dbg) TestFunctional/parallel/ServiceCmdConnect: app=hello-node-connect healthy within 9.003977883s
functional_test.go:1645: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service hello-node-connect --url
functional_test.go:1651: found endpoint for hello-node-connect: http://192.168.49.2:30664
functional_test.go:1671: http://192.168.49.2:30664: success! body:

Hostname: hello-node-connect-6f49f58cd5-nj7kw

Pod Information:
	-no pod information available-

Server values:
	server_version=nginx: 1.13.3 - lua: 10008

Request Information:
	client_address=10.244.0.1
	method=GET
	real path=/
	query=
	request_version=1.1
	request_uri=http://192.168.49.2:8080/

Request Headers:
	accept-encoding=gzip
	host=192.168.49.2:30664
	user-agent=Go-http-client/1.1

Request Body:
	-no body in request-

--- PASS: TestFunctional/parallel/ServiceCmdConnect (9.59s)

TestFunctional/parallel/AddonsCmd (0.14s)
=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd

=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1686: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 addons list
functional_test.go:1698: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.14s)

TestFunctional/parallel/PersistentVolumeClaim (25.6s)
=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim

=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:344: "storage-provisioner" [e7af2734-0175-4301-a479-06276237b322] Running
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: integration-test=storage-provisioner healthy within 6.003863879s
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-781779 get storageclass -o=json
functional_test_pvc_test.go:69: (dbg) Run:  kubectl --context functional-781779 apply -f testdata/storage-provisioner/pvc.yaml
functional_test_pvc_test.go:76: (dbg) Run:  kubectl --context functional-781779 get pvc myclaim -o=json
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-781779 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:344: "sp-pod" [9e10bb1c-a4fa-42af-9695-d853e966f3b2] Pending
helpers_test.go:344: "sp-pod" [9e10bb1c-a4fa-42af-9695-d853e966f3b2] Pending / Ready:ContainersNotReady (containers with unready status: [myfrontend]) / ContainersReady:ContainersNotReady (containers with unready status: [myfrontend])
helpers_test.go:344: "sp-pod" [9e10bb1c-a4fa-42af-9695-d853e966f3b2] Running
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 11.003465374s
functional_test_pvc_test.go:100: (dbg) Run:  kubectl --context functional-781779 exec sp-pod -- touch /tmp/mount/foo
functional_test_pvc_test.go:106: (dbg) Run:  kubectl --context functional-781779 delete -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:106: (dbg) Done: kubectl --context functional-781779 delete -f testdata/storage-provisioner/pod.yaml: (1.61801843s)
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-781779 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:344: "sp-pod" [c89cf5e8-e14c-4e8e-9c01-63576431ea39] Pending
helpers_test.go:344: "sp-pod" [c89cf5e8-e14c-4e8e-9c01-63576431ea39] Running
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 6.004080659s
functional_test_pvc_test.go:114: (dbg) Run:  kubectl --context functional-781779 exec sp-pod -- ls /tmp/mount
--- PASS: TestFunctional/parallel/PersistentVolumeClaim (25.60s)

TestFunctional/parallel/SSHCmd (0.73s)
=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd

=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1721: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "echo hello"
functional_test.go:1738: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "cat /etc/hostname"
--- PASS: TestFunctional/parallel/SSHCmd (0.73s)

TestFunctional/parallel/CpCmd (2.35s)
=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd

=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cp testdata/cp-test.txt /home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh -n functional-781779 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cp functional-781779:/home/docker/cp-test.txt /tmp/TestFunctionalparallelCpCmd2893298321/001/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh -n functional-781779 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 cp testdata/cp-test.txt /tmp/does/not/exist/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh -n functional-781779 "sudo cat /tmp/does/not/exist/cp-test.txt"
--- PASS: TestFunctional/parallel/CpCmd (2.35s)

TestFunctional/parallel/FileSync (0.43s)
=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync

=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1925: Checking for existence of /etc/test/nested/copy/1195688/hosts within VM
functional_test.go:1927: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /etc/test/nested/copy/1195688/hosts"
functional_test.go:1932: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (0.43s)

TestFunctional/parallel/CertSync (2.17s)
=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync

=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1968: Checking for existence of /etc/ssl/certs/1195688.pem within VM
functional_test.go:1969: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /etc/ssl/certs/1195688.pem"
functional_test.go:1968: Checking for existence of /usr/share/ca-certificates/1195688.pem within VM
functional_test.go:1969: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /usr/share/ca-certificates/1195688.pem"
functional_test.go:1968: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1969: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:1995: Checking for existence of /etc/ssl/certs/11956882.pem within VM
functional_test.go:1996: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /etc/ssl/certs/11956882.pem"
functional_test.go:1995: Checking for existence of /usr/share/ca-certificates/11956882.pem within VM
functional_test.go:1996: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /usr/share/ca-certificates/11956882.pem"
functional_test.go:1995: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:1996: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
--- PASS: TestFunctional/parallel/CertSync (2.17s)
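
Note: the hashed filenames checked above (51391683.0, 3ec20f2e.0) follow the OpenSSL c_rehash convention, where a trust-store entry is named by the certificate's subject hash plus a suffix. The essence of the check is that each synced file is a readable PEM certificate; a sketch of that verification idea only (not minikube's actual code; the path is the guest path the test reads, so substitute any local PEM when trying it):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	// Confirm a synced file really is a parseable PEM certificate.
	data, err := os.ReadFile("/etc/ssl/certs/1195688.pem")
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	block, _ := pem.Decode(data)
	if block == nil || block.Type != "CERTIFICATE" {
		fmt.Println("not a PEM certificate")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	fmt.Println("subject:", cert.Subject)
}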

TestFunctional/parallel/NodeLabels (0.11s)

=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels

=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:218: (dbg) Run:  kubectl --context functional-781779 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
--- PASS: TestFunctional/parallel/NodeLabels (0.11s)
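
Note: the --template string passed to kubectl above is ordinary Go text/template syntax. The same range construct can be evaluated locally over a plain map (the sample labels below are illustrative, not from the cluster):

package main

import (
	"fmt"
	"os"
	"text/template"
)

func main() {
	labels := map[string]string{
		"kubernetes.io/arch": "arm64",
		"kubernetes.io/os":   "linux",
	}
	// Same construct kubectl evaluates; range over a map visits keys in sorted order.
	t := template.Must(template.New("labels").Parse(`{{range $k, $v := .}}{{$k}} {{end}}`))
	if err := t.Execute(os.Stdout, labels); err != nil {
		fmt.Println("template failed:", err)
	}
}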

TestFunctional/parallel/NonActiveRuntimeDisabled (0.71s)

=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled

=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:2023: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo systemctl is-active docker"
functional_test.go:2023: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh "sudo systemctl is-active docker": exit status 1 (338.012964ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
functional_test.go:2023: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo systemctl is-active crio"
functional_test.go:2023: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh "sudo systemctl is-active crio": exit status 1 (369.87849ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (0.71s)
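
Note: the two non-zero exits above are the expected result, not failures. systemctl is-active prints "inactive" and exits with status 3 when a unit is not running, so on a containerd cluster the docker and crio probes pass precisely by failing. A sketch of telling that apart from a genuine error by inspecting the exit code (requires a systemd host):

package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	out, err := exec.Command("systemctl", "is-active", "docker").CombinedOutput()
	var ee *exec.ExitError
	if errors.As(err, &ee) {
		// Exit code 3 with output "inactive" means the unit exists but is stopped.
		fmt.Printf("unit state %q, exit code %d\n", string(out), ee.ExitCode())
		return
	}
	if err != nil {
		fmt.Println("could not run systemctl:", err)
		return
	}
	fmt.Printf("active: %s", out)
}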

TestFunctional/parallel/License (0.24s)

=== RUN   TestFunctional/parallel/License
=== PAUSE TestFunctional/parallel/License

=== CONT  TestFunctional/parallel/License
functional_test.go:2284: (dbg) Run:  out/minikube-linux-arm64 license
--- PASS: TestFunctional/parallel/License (0.24s)

TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.64s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr]
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr]
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr] ...
helpers_test.go:508: unable to kill pid 1237341: os: process already finished
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.64s)
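
Note: the two helper messages above are benign teardown races: the tunnel process exited before cleanup tried to kill it, so Kill reports "process already finished". A sketch of the race, assuming a Linux host with /bin/true (illustrative stand-in for the tunnel process):

package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

func main() {
	cmd := exec.Command("true")
	if err := cmd.Start(); err != nil {
		fmt.Println("start failed:", err)
		return
	}
	_ = cmd.Wait() // child exits and is reaped before cleanup runs
	if err := cmd.Process.Kill(); errors.Is(err, os.ErrProcessDone) {
		// Same condition the teardown helper tolerates above.
		fmt.Println("ignoring: process already finished")
	}
}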

TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:129: (dbg) daemon: [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.00s)

TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (12.55s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup
functional_test_tunnel_test.go:212: (dbg) Run:  kubectl --context functional-781779 apply -f testdata/testsvc.yaml
functional_test_tunnel_test.go:216: (dbg) TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup: waiting 4m0s for pods matching "run=nginx-svc" in namespace "default" ...
helpers_test.go:344: "nginx-svc" [a6e25a2e-cdbb-455d-bacd-af76e449b17a] Pending
helpers_test.go:344: "nginx-svc" [a6e25a2e-cdbb-455d-bacd-af76e449b17a] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx-svc" [a6e25a2e-cdbb-455d-bacd-af76e449b17a] Running
functional_test_tunnel_test.go:216: (dbg) TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup: run=nginx-svc healthy within 12.003958313s
--- PASS: TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (12.55s)
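
Note: the Pending -> Pending/ContainersNotReady -> Running progression above is the pod phase transitions the wait loop observes. A minimal stand-in for that wait using plain kubectl (pod name and context are the ones from this run; the loop itself is a sketch, not the harness's implementation):

package main

import (
	"fmt"
	"os/exec"
	"strings"
	"time"
)

func main() {
	for i := 0; i < 60; i++ {
		out, err := exec.Command("kubectl", "--context", "functional-781779",
			"get", "pod", "nginx-svc", "-o", "jsonpath={.status.phase}").Output()
		if err == nil && strings.TrimSpace(string(out)) == "Running" {
			fmt.Println("pod is Running")
			return
		}
		time.Sleep(2 * time.Second)
	}
	fmt.Println("timed out waiting for pod")
}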

TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.09s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP
functional_test_tunnel_test.go:234: (dbg) Run:  kubectl --context functional-781779 get svc nginx-svc -o jsonpath={.status.loadBalancer.ingress[0].ip}
--- PASS: TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.09s)

TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:299: tunnel at http://10.101.13.237 is working!
--- PASS: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:434: (dbg) stopping [out/minikube-linux-arm64 -p functional-781779 tunnel --alsologtostderr] ...
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

TestFunctional/parallel/ServiceCmd/DeployApp (8.24s)

=== RUN   TestFunctional/parallel/ServiceCmd/DeployApp
functional_test.go:1433: (dbg) Run:  kubectl --context functional-781779 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1441: (dbg) Run:  kubectl --context functional-781779 expose deployment hello-node --type=NodePort --port=8080
functional_test.go:1446: (dbg) TestFunctional/parallel/ServiceCmd/DeployApp: waiting 10m0s for pods matching "app=hello-node" in namespace "default" ...
helpers_test.go:344: "hello-node-65f5d5cc78-ptw4l" [87223bab-02de-4a1f-8dea-36be29180b3a] Pending / Ready:ContainersNotReady (containers with unready status: [echoserver-arm]) / ContainersReady:ContainersNotReady (containers with unready status: [echoserver-arm])
E0704 01:52:15.066674 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
helpers_test.go:344: "hello-node-65f5d5cc78-ptw4l" [87223bab-02de-4a1f-8dea-36be29180b3a] Running
functional_test.go:1446: (dbg) TestFunctional/parallel/ServiceCmd/DeployApp: app=hello-node healthy within 8.003673315s
--- PASS: TestFunctional/parallel/ServiceCmd/DeployApp (8.24s)

TestFunctional/parallel/ProfileCmd/profile_not_create (0.41s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1266: (dbg) Run:  out/minikube-linux-arm64 profile lis
functional_test.go:1271: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (0.41s)

TestFunctional/parallel/ProfileCmd/profile_list (0.38s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1306: (dbg) Run:  out/minikube-linux-arm64 profile list
functional_test.go:1311: Took "322.789824ms" to run "out/minikube-linux-arm64 profile list"
functional_test.go:1320: (dbg) Run:  out/minikube-linux-arm64 profile list -l
functional_test.go:1325: Took "52.902358ms" to run "out/minikube-linux-arm64 profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (0.38s)

TestFunctional/parallel/ProfileCmd/profile_json_output (0.4s)

=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1357: (dbg) Run:  out/minikube-linux-arm64 profile list -o json
functional_test.go:1362: Took "335.873032ms" to run "out/minikube-linux-arm64 profile list -o json"
functional_test.go:1370: (dbg) Run:  out/minikube-linux-arm64 profile list -o json --light
functional_test.go:1375: Took "59.40528ms" to run "out/minikube-linux-arm64 profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (0.40s)

TestFunctional/parallel/MountCmd/any-port (6.15s)

=== RUN   TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:73: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdany-port1618303478/001:/mount-9p --alsologtostderr -v=1]
functional_test_mount_test.go:107: wrote "test-1720057938933987729" to /tmp/TestFunctionalparallelMountCmdany-port1618303478/001/created-by-test
functional_test_mount_test.go:107: wrote "test-1720057938933987729" to /tmp/TestFunctionalparallelMountCmdany-port1618303478/001/created-by-test-removed-by-pod
functional_test_mount_test.go:107: wrote "test-1720057938933987729" to /tmp/TestFunctionalparallelMountCmdany-port1618303478/001/test-1720057938933987729
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:115: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (368.526406ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:129: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh -- ls -la /mount-9p
functional_test_mount_test.go:133: guest mount directory contents
total 2
-rw-r--r-- 1 docker docker 24 Jul  4 01:52 created-by-test
-rw-r--r-- 1 docker docker 24 Jul  4 01:52 created-by-test-removed-by-pod
-rw-r--r-- 1 docker docker 24 Jul  4 01:52 test-1720057938933987729
functional_test_mount_test.go:137: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh cat /mount-9p/test-1720057938933987729
functional_test_mount_test.go:148: (dbg) Run:  kubectl --context functional-781779 replace --force -f testdata/busybox-mount-test.yaml
functional_test_mount_test.go:153: (dbg) TestFunctional/parallel/MountCmd/any-port: waiting 4m0s for pods matching "integration-test=busybox-mount" in namespace "default" ...
helpers_test.go:344: "busybox-mount" [57da607b-0522-48cc-8970-224fc8609a14] Pending
helpers_test.go:344: "busybox-mount" [57da607b-0522-48cc-8970-224fc8609a14] Pending / Ready:ContainersNotReady (containers with unready status: [mount-munger]) / ContainersReady:ContainersNotReady (containers with unready status: [mount-munger])
helpers_test.go:344: "busybox-mount" [57da607b-0522-48cc-8970-224fc8609a14] Pending / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
helpers_test.go:344: "busybox-mount" [57da607b-0522-48cc-8970-224fc8609a14] Succeeded / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
functional_test_mount_test.go:153: (dbg) TestFunctional/parallel/MountCmd/any-port: integration-test=busybox-mount healthy within 3.004994901s
functional_test_mount_test.go:169: (dbg) Run:  kubectl --context functional-781779 logs busybox-mount
functional_test_mount_test.go:181: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh stat /mount-9p/created-by-test
functional_test_mount_test.go:181: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh stat /mount-9p/created-by-pod
functional_test_mount_test.go:90: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:94: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdany-port1618303478/001:/mount-9p --alsologtostderr -v=1] ...
--- PASS: TestFunctional/parallel/MountCmd/any-port (6.15s)
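
Note: the first findmnt above fails with exit 1 because grep finds no 9p entry while the background mount daemon is still attaching; the test simply retries until the mount appears. A sketch of that polling pattern (mount point taken from the log; the loop bounds are illustrative):

package main

import (
	"fmt"
	"os/exec"
	"time"
)

func main() {
	// Poll until the 9p mount shows up, mirroring the retried findmnt above.
	for i := 0; i < 10; i++ {
		if err := exec.Command("sh", "-c", "findmnt -T /mount-9p | grep 9p").Run(); err == nil {
			fmt.Println("mount is up")
			return
		}
		time.Sleep(500 * time.Millisecond)
	}
	fmt.Println("mount never appeared")
}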

TestFunctional/parallel/ServiceCmd/List (0.51s)

=== RUN   TestFunctional/parallel/ServiceCmd/List
functional_test.go:1455: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service list
--- PASS: TestFunctional/parallel/ServiceCmd/List (0.51s)

TestFunctional/parallel/ServiceCmd/JSONOutput (0.52s)

=== RUN   TestFunctional/parallel/ServiceCmd/JSONOutput
functional_test.go:1485: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service list -o json
functional_test.go:1490: Took "518.485805ms" to run "out/minikube-linux-arm64 -p functional-781779 service list -o json"
--- PASS: TestFunctional/parallel/ServiceCmd/JSONOutput (0.52s)

TestFunctional/parallel/ServiceCmd/HTTPS (0.48s)

=== RUN   TestFunctional/parallel/ServiceCmd/HTTPS
functional_test.go:1505: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service --namespace=default --https --url hello-node
functional_test.go:1518: found endpoint: https://192.168.49.2:30484
--- PASS: TestFunctional/parallel/ServiceCmd/HTTPS (0.48s)

TestFunctional/parallel/ServiceCmd/Format (0.53s)

=== RUN   TestFunctional/parallel/ServiceCmd/Format
functional_test.go:1536: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service hello-node --url --format={{.IP}}
--- PASS: TestFunctional/parallel/ServiceCmd/Format (0.53s)

TestFunctional/parallel/ServiceCmd/URL (0.54s)

=== RUN   TestFunctional/parallel/ServiceCmd/URL
functional_test.go:1555: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 service hello-node --url
functional_test.go:1561: found endpoint for hello-node: http://192.168.49.2:30484
--- PASS: TestFunctional/parallel/ServiceCmd/URL (0.54s)

TestFunctional/parallel/MountCmd/specific-port (1.38s)

=== RUN   TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:213: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdspecific-port2321444039/001:/mount-9p --alsologtostderr -v=1 --port 46464]
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:257: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh -- ls -la /mount-9p
functional_test_mount_test.go:261: guest mount directory contents
total 0
functional_test_mount_test.go:263: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdspecific-port2321444039/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
functional_test_mount_test.go:264: reading mount text
functional_test_mount_test.go:278: done reading mount text
functional_test_mount_test.go:230: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:230: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh "sudo umount -f /mount-9p": exit status 1 (350.501413ms)

-- stdout --
	umount: /mount-9p: not mounted.

-- /stdout --
** stderr ** 
	ssh: Process exited with status 32

** /stderr **
functional_test_mount_test.go:232: "out/minikube-linux-arm64 -p functional-781779 ssh \"sudo umount -f /mount-9p\"": exit status 1
functional_test_mount_test.go:234: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdspecific-port2321444039/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
--- PASS: TestFunctional/parallel/MountCmd/specific-port (1.38s)

TestFunctional/parallel/MountCmd/VerifyCleanup (1.71s)

=== RUN   TestFunctional/parallel/MountCmd/VerifyCleanup
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount1 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount2 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount3 --alsologtostderr -v=1]
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T" /mount2
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh "findmnt -T" /mount3
functional_test_mount_test.go:370: (dbg) Run:  out/minikube-linux-arm64 mount -p functional-781779 --kill=true
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount1 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount2 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-781779 /tmp/TestFunctionalparallelMountCmdVerifyCleanup289470561/001:/mount3 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/MountCmd/VerifyCleanup (1.71s)

TestFunctional/parallel/Version/short (0.09s)

=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short

=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2252: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 version --short
--- PASS: TestFunctional/parallel/Version/short (0.09s)

TestFunctional/parallel/Version/components (1.21s)

=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components

=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2266: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 version -o=json --components
functional_test.go:2266: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 version -o=json --components: (1.2107002s)
--- PASS: TestFunctional/parallel/Version/components (1.21s)

TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListShort
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListShort

=== CONT  TestFunctional/parallel/ImageCommands/ImageListShort
functional_test.go:260: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls --format short --alsologtostderr
functional_test.go:265: (dbg) Stdout: out/minikube-linux-arm64 -p functional-781779 image ls --format short --alsologtostderr:
registry.k8s.io/pause:latest
registry.k8s.io/pause:3.9
registry.k8s.io/pause:3.3
registry.k8s.io/pause:3.1
registry.k8s.io/kube-scheduler:v1.30.2
registry.k8s.io/kube-proxy:v1.30.2
registry.k8s.io/kube-controller-manager:v1.30.2
registry.k8s.io/kube-apiserver:v1.30.2
registry.k8s.io/etcd:3.5.12-0
registry.k8s.io/echoserver-arm:1.8
registry.k8s.io/coredns/coredns:v1.11.1
gcr.io/k8s-minikube/storage-provisioner:v5
gcr.io/k8s-minikube/busybox:1.28.4-glibc
docker.io/library/nginx:latest
docker.io/library/nginx:alpine
docker.io/library/minikube-local-cache-test:functional-781779
docker.io/kindest/kindnetd:v20240513-cd2ac642
functional_test.go:268: (dbg) Stderr: out/minikube-linux-arm64 -p functional-781779 image ls --format short --alsologtostderr:
I0704 01:52:47.067837 1242562 out.go:291] Setting OutFile to fd 1 ...
I0704 01:52:47.067997 1242562 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.068008 1242562 out.go:304] Setting ErrFile to fd 2...
I0704 01:52:47.068014 1242562 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.068269 1242562 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
I0704 01:52:47.068951 1242562 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.069079 1242562 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.069551 1242562 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
I0704 01:52:47.089960 1242562 ssh_runner.go:195] Run: systemctl --version
I0704 01:52:47.090016 1242562 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
I0704 01:52:47.112812 1242562 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
I0704 01:52:47.212733 1242562 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)
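
Note: the stderr above shows how minikube locates the node's SSH endpoint: docker container inspect with a Go template that indexes NetworkSettings.Ports["22/tcp"][0].HostPort. The same query, minus the extra quoting the harness adds, can be run directly (container name taken from this run; assumes it is still up):

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Pull the host port mapped to the container's SSH port 22.
	format := `{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}`
	out, err := exec.Command("docker", "container", "inspect", "-f", format, "functional-781779").Output()
	if err != nil {
		fmt.Println("inspect failed:", err)
		return
	}
	fmt.Printf("ssh port: %s", out)
}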

TestFunctional/parallel/ImageCommands/ImageListTable (0.33s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListTable
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListTable

=== CONT  TestFunctional/parallel/ImageCommands/ImageListTable
functional_test.go:260: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls --format table --alsologtostderr
functional_test.go:265: (dbg) Stdout: out/minikube-linux-arm64 -p functional-781779 image ls --format table --alsologtostderr:
|---------------------------------------------|--------------------|---------------|--------|
|                    Image                    |        Tag         |   Image ID    |  Size  |
|---------------------------------------------|--------------------|---------------|--------|
| registry.k8s.io/kube-controller-manager     | v1.30.2            | sha256:e1dcc3 | 28.4MB |
| registry.k8s.io/pause                       | 3.1                | sha256:8057e0 | 262kB  |
| registry.k8s.io/pause                       | 3.3                | sha256:3d1873 | 249kB  |
| registry.k8s.io/pause                       | 3.9                | sha256:829e9d | 268kB  |
| registry.k8s.io/pause                       | latest             | sha256:8cb209 | 71.3kB |
| gcr.io/k8s-minikube/storage-provisioner     | v5                 | sha256:ba04bb | 8.03MB |
| registry.k8s.io/coredns/coredns             | v1.11.1            | sha256:2437cf | 16.5MB |
| docker.io/kindest/kindnetd                  | v20240513-cd2ac642 | sha256:89d73d | 25.8MB |
| registry.k8s.io/etcd                        | 3.5.12-0           | sha256:014faa | 66.2MB |
| registry.k8s.io/kube-scheduler              | v1.30.2            | sha256:c7dd04 | 17.6MB |
| docker.io/library/nginx                     | latest             | sha256:443d19 | 67.6MB |
| gcr.io/k8s-minikube/busybox                 | 1.28.4-glibc       | sha256:1611cd | 1.94MB |
| registry.k8s.io/kube-proxy                  | v1.30.2            | sha256:66dbb9 | 25.6MB |
| registry.k8s.io/echoserver-arm              | 1.8                | sha256:72565b | 45.3MB |
| registry.k8s.io/kube-apiserver              | v1.30.2            | sha256:84c601 | 29.9MB |
| docker.io/library/minikube-local-cache-test | functional-781779  | sha256:354265 | 993B   |
| docker.io/library/nginx                     | alpine             | sha256:5461b1 | 18.3MB |
|---------------------------------------------|--------------------|---------------|--------|
functional_test.go:268: (dbg) Stderr: out/minikube-linux-arm64 -p functional-781779 image ls --format table --alsologtostderr:
I0704 01:52:47.367419 1242634 out.go:291] Setting OutFile to fd 1 ...
I0704 01:52:47.370841 1242634 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.370893 1242634 out.go:304] Setting ErrFile to fd 2...
I0704 01:52:47.370916 1242634 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.371252 1242634 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
I0704 01:52:47.372040 1242634 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.372238 1242634 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.372792 1242634 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
I0704 01:52:47.396891 1242634 ssh_runner.go:195] Run: systemctl --version
I0704 01:52:47.396955 1242634 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
I0704 01:52:47.431018 1242634 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
I0704 01:52:47.531901 1242634 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListTable (0.33s)

TestFunctional/parallel/ImageCommands/ImageListJson (0.3s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListJson
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListJson

=== CONT  TestFunctional/parallel/ImageCommands/ImageListJson
functional_test.go:260: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls --format json --alsologtostderr
functional_test.go:265: (dbg) Stdout: out/minikube-linux-arm64 -p functional-781779 image ls --format json --alsologtostderr:
[{"id":"sha256:8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a","repoDigests":[],"repoTags":["registry.k8s.io/pause:latest"],"size":"71300"},{"id":"sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a","repoDigests":["docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c"],"repoTags":[],"size":"18306114"},{"id":"sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.1"],"size":"262191"},{"id":"sha256:e1dcc3400d3ea6a268c7ea6e66c3a196703770a8e346b695f54344ab53a47567","repoDigests":["registry.k8s.io/kube-controller-manager@sha256:4c412bc1fc585ddeba10d34a02e7507ea787ec2c57256d4c18fd230377ab048e"],"repoTags":["registry.k8s.io/kube-controller-manager:v1.30.2"],"size":"28368865"},{"id":"sha256:829e9de338bd5fdd3f16f68f83a9fb288fbc8453e881e5d5cfd0f6f2ff72b43e","repoDigests":["registry.k8s.io/pause@sha256:7031c1b283388d2c2e09b57badb803c05ebed362dc88
d84b480cc47f72a21097"],"repoTags":["registry.k8s.io/pause:3.9"],"size":"268051"},{"id":"sha256:89d73d416b992e8f9602b67b4614d9e7f0655aebb3696e18efec695e0b654c40","repoDigests":["docker.io/kindest/kindnetd@sha256:9c2b5fcda3cb5a9725ecb893f3c8998a92d51a87465a886eb563e18d649383a8"],"repoTags":["docker.io/kindest/kindnetd:v20240513-cd2ac642"],"size":"25795292"},{"id":"sha256:3542650689cd7890a83d91c1b303d3f65b1203f61639b45abd2c8d2c0ec88859","repoDigests":[],"repoTags":["docker.io/library/minikube-local-cache-test:functional-781779"],"size":"993"},{"id":"sha256:443d199e8bfcce69c2aa494b36b5f8b04c3b183277cd19190e9589fd8552d618","repoDigests":["docker.io/library/nginx@sha256:67682bda769fae1ccf5183192b8daf37b64cae99c6c3302650f6f8bf5f0f95df"],"repoTags":["docker.io/library/nginx:latest"],"size":"67646985"},{"id":"sha256:1611cd07b61d57dbbfebe6db242513fd51e1c02d20ba08af17a45837d86a8a8c","repoDigests":["gcr.io/k8s-minikube/busybox@sha256:2d03e6ceeb99250061dd110530b0ece7998cd84121f952adef120ea7c5a6f00e"],"repoTags":["gcr.io/k
8s-minikube/busybox:1.28.4-glibc"],"size":"1935750"},{"id":"sha256:014faa467e29798aeef733fe6d1a3b5e382688217b053ad23410e6cccd5d22fd","repoDigests":["registry.k8s.io/etcd@sha256:44a8e24dcbba3470ee1fee21d5e88d128c936e9b55d4bc51fbef8086f8ed123b"],"repoTags":["registry.k8s.io/etcd:3.5.12-0"],"size":"66189079"},{"id":"sha256:84c601f3f72c87776cdcf77a73329d1f45297e43a92508b0f289fa2fcf8872a0","repoDigests":["registry.k8s.io/kube-apiserver@sha256:340ab4a1d66a60630a7a298aa0b2576fcd82e51ecdddb751cf61e5d3846fde2d"],"repoTags":["registry.k8s.io/kube-apiserver:v1.30.2"],"size":"29937230"},{"id":"sha256:66dbb96a9149f69913ff817f696be766014cacdffc2ce0889a76c81165415fae","repoDigests":["registry.k8s.io/kube-proxy@sha256:8a44c6e094af3dea3de57fa967e201608a358a3bd8b4e3f31ab905bbe4108aec"],"repoTags":["registry.k8s.io/kube-proxy:v1.30.2"],"size":"25633111"},{"id":"sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8","repoDigests":["docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab0
9d65b24f588b1d449841ef93"],"repoTags":[],"size":"74084559"},{"id":"sha256:5461b18aaccf366faf9fba071a5f1ac333cd13435366b32c5e9b8ec903fa18a1","repoDigests":["docker.io/library/nginx@sha256:a45ee5d042aaa9e81e013f97ae40c3dda26fbe98f22b6251acdf28e579560d55"],"repoTags":["docker.io/library/nginx:alpine"],"size":"18252263"},{"id":"sha256:72565bf5bbedfb62e9d21afa2b1221b2c7a5e05b746dae33430bc550d3f87beb","repoDigests":["registry.k8s.io/echoserver-arm@sha256:b33d4cdf6ed097f4e9b77b135d83a596ab73c6268b0342648818eb85f5edfdb5"],"repoTags":["registry.k8s.io/echoserver-arm:1.8"],"size":"45324675"},{"id":"sha256:c7dd04b1bafeb51c650fde7f34ac0fdafa96030e77ea7a822135ff302d895dd5","repoDigests":["registry.k8s.io/kube-scheduler@sha256:0ed75a333704f5d315395c6ec04d7af7405715537069b65d40b43ec1c8e030bc"],"repoTags":["registry.k8s.io/kube-scheduler:v1.30.2"],"size":"17643200"},{"id":"sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.3"],"size":"249461"},{"id":
"sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","repoDigests":["gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"],"repoTags":["gcr.io/k8s-minikube/storage-provisioner:v5"],"size":"8034419"},{"id":"sha256:2437cf762177702dec2dfe99a09c37427a15af6d9a57c456b65352667c223d93","repoDigests":["registry.k8s.io/coredns/coredns@sha256:1eeb4c7316bacb1d4c8ead65571cd92dd21e27359f0d4917f1a5822a73b75db1"],"repoTags":["registry.k8s.io/coredns/coredns:v1.11.1"],"size":"16482581"}]
functional_test.go:268: (dbg) Stderr: out/minikube-linux-arm64 -p functional-781779 image ls --format json --alsologtostderr:
I0704 01:52:47.338261 1242628 out.go:291] Setting OutFile to fd 1 ...
I0704 01:52:47.338454 1242628 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.338462 1242628 out.go:304] Setting ErrFile to fd 2...
I0704 01:52:47.338483 1242628 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.338804 1242628 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
I0704 01:52:47.339695 1242628 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.339827 1242628 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.340418 1242628 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
I0704 01:52:47.385257 1242628 ssh_runner.go:195] Run: systemctl --version
I0704 01:52:47.385327 1242628 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
I0704 01:52:47.410080 1242628 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
I0704 01:52:47.503979 1242628 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListJson (0.30s)
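
Note: the JSON stdout above is an array of objects with id, repoDigests, repoTags, and size (size is a string). A sketch of decoding it, using one entry copied verbatim from the output above as sample input:

package main

import (
	"encoding/json"
	"fmt"
)

// image matches the objects in the `image ls --format json` stdout above.
type image struct {
	ID          string   `json:"id"`
	RepoDigests []string `json:"repoDigests"`
	RepoTags    []string `json:"repoTags"`
	Size        string   `json:"size"`
}

func main() {
	raw := `[{"id":"sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","repoDigests":["gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"],"repoTags":["gcr.io/k8s-minikube/storage-provisioner:v5"],"size":"8034419"}]`
	var images []image
	if err := json.Unmarshal([]byte(raw), &images); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	for _, img := range images {
		fmt.Println(img.RepoTags, img.Size)
	}
}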

TestFunctional/parallel/ImageCommands/ImageListYaml (0.3s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListYaml
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListYaml

=== CONT  TestFunctional/parallel/ImageCommands/ImageListYaml
functional_test.go:260: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls --format yaml --alsologtostderr
functional_test.go:265: (dbg) Stdout: out/minikube-linux-arm64 -p functional-781779 image ls --format yaml --alsologtostderr:
- id: sha256:89d73d416b992e8f9602b67b4614d9e7f0655aebb3696e18efec695e0b654c40
repoDigests:
- docker.io/kindest/kindnetd@sha256:9c2b5fcda3cb5a9725ecb893f3c8998a92d51a87465a886eb563e18d649383a8
repoTags:
- docker.io/kindest/kindnetd:v20240513-cd2ac642
size: "25795292"
- id: sha256:1611cd07b61d57dbbfebe6db242513fd51e1c02d20ba08af17a45837d86a8a8c
repoDigests:
- gcr.io/k8s-minikube/busybox@sha256:2d03e6ceeb99250061dd110530b0ece7998cd84121f952adef120ea7c5a6f00e
repoTags:
- gcr.io/k8s-minikube/busybox:1.28.4-glibc
size: "1935750"
- id: sha256:014faa467e29798aeef733fe6d1a3b5e382688217b053ad23410e6cccd5d22fd
repoDigests:
- registry.k8s.io/etcd@sha256:44a8e24dcbba3470ee1fee21d5e88d128c936e9b55d4bc51fbef8086f8ed123b
repoTags:
- registry.k8s.io/etcd:3.5.12-0
size: "66189079"
- id: sha256:66dbb96a9149f69913ff817f696be766014cacdffc2ce0889a76c81165415fae
repoDigests:
- registry.k8s.io/kube-proxy@sha256:8a44c6e094af3dea3de57fa967e201608a358a3bd8b4e3f31ab905bbe4108aec
repoTags:
- registry.k8s.io/kube-proxy:v1.30.2
size: "25633111"
- id: sha256:829e9de338bd5fdd3f16f68f83a9fb288fbc8453e881e5d5cfd0f6f2ff72b43e
repoDigests:
- registry.k8s.io/pause@sha256:7031c1b283388d2c2e09b57badb803c05ebed362dc88d84b480cc47f72a21097
repoTags:
- registry.k8s.io/pause:3.9
size: "268051"
- id: sha256:8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a
repoDigests: []
repoTags:
- registry.k8s.io/pause:latest
size: "71300"
- id: sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8
repoDigests:
- docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93
repoTags: []
size: "74084559"
- id: sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a
repoDigests:
- docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c
repoTags: []
size: "18306114"
- id: sha256:5461b18aaccf366faf9fba071a5f1ac333cd13435366b32c5e9b8ec903fa18a1
repoDigests:
- docker.io/library/nginx@sha256:a45ee5d042aaa9e81e013f97ae40c3dda26fbe98f22b6251acdf28e579560d55
repoTags:
- docker.io/library/nginx:alpine
size: "18252263"
- id: sha256:2437cf762177702dec2dfe99a09c37427a15af6d9a57c456b65352667c223d93
repoDigests:
- registry.k8s.io/coredns/coredns@sha256:1eeb4c7316bacb1d4c8ead65571cd92dd21e27359f0d4917f1a5822a73b75db1
repoTags:
- registry.k8s.io/coredns/coredns:v1.11.1
size: "16482581"
- id: sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.1
size: "262191"
- id: sha256:72565bf5bbedfb62e9d21afa2b1221b2c7a5e05b746dae33430bc550d3f87beb
repoDigests:
- registry.k8s.io/echoserver-arm@sha256:b33d4cdf6ed097f4e9b77b135d83a596ab73c6268b0342648818eb85f5edfdb5
repoTags:
- registry.k8s.io/echoserver-arm:1.8
size: "45324675"
- id: sha256:84c601f3f72c87776cdcf77a73329d1f45297e43a92508b0f289fa2fcf8872a0
repoDigests:
- registry.k8s.io/kube-apiserver@sha256:340ab4a1d66a60630a7a298aa0b2576fcd82e51ecdddb751cf61e5d3846fde2d
repoTags:
- registry.k8s.io/kube-apiserver:v1.30.2
size: "29937230"
- id: sha256:c7dd04b1bafeb51c650fde7f34ac0fdafa96030e77ea7a822135ff302d895dd5
repoDigests:
- registry.k8s.io/kube-scheduler@sha256:0ed75a333704f5d315395c6ec04d7af7405715537069b65d40b43ec1c8e030bc
repoTags:
- registry.k8s.io/kube-scheduler:v1.30.2
size: "17643200"
- id: sha256:3542650689cd7890a83d91c1b303d3f65b1203f61639b45abd2c8d2c0ec88859
repoDigests: []
repoTags:
- docker.io/library/minikube-local-cache-test:functional-781779
size: "993"
- id: sha256:443d199e8bfcce69c2aa494b36b5f8b04c3b183277cd19190e9589fd8552d618
repoDigests:
- docker.io/library/nginx@sha256:67682bda769fae1ccf5183192b8daf37b64cae99c6c3302650f6f8bf5f0f95df
repoTags:
- docker.io/library/nginx:latest
size: "67646985"
- id: sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6
repoDigests:
- gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944
repoTags:
- gcr.io/k8s-minikube/storage-provisioner:v5
size: "8034419"
- id: sha256:e1dcc3400d3ea6a268c7ea6e66c3a196703770a8e346b695f54344ab53a47567
repoDigests:
- registry.k8s.io/kube-controller-manager@sha256:4c412bc1fc585ddeba10d34a02e7507ea787ec2c57256d4c18fd230377ab048e
repoTags:
- registry.k8s.io/kube-controller-manager:v1.30.2
size: "28368865"
- id: sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.3
size: "249461"

functional_test.go:268: (dbg) Stderr: out/minikube-linux-arm64 -p functional-781779 image ls --format yaml --alsologtostderr:
I0704 01:52:47.068260 1242563 out.go:291] Setting OutFile to fd 1 ...
I0704 01:52:47.068460 1242563 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.068486 1242563 out.go:304] Setting ErrFile to fd 2...
I0704 01:52:47.068593 1242563 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.068894 1242563 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
I0704 01:52:47.069727 1242563 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.070048 1242563 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.070586 1242563 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
I0704 01:52:47.095199 1242563 ssh_runner.go:195] Run: systemctl --version
I0704 01:52:47.095252 1242563 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
I0704 01:52:47.112013 1242563 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
I0704 01:52:47.208304 1242563 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListYaml (0.30s)

TestFunctional/parallel/ImageCommands/ImageBuild (2.59s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageBuild
=== PAUSE TestFunctional/parallel/ImageCommands/ImageBuild

=== CONT  TestFunctional/parallel/ImageCommands/ImageBuild
functional_test.go:307: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 ssh pgrep buildkitd
functional_test.go:307: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-781779 ssh pgrep buildkitd: exit status 1 (261.277692ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test.go:314: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image build -t localhost/my-image:functional-781779 testdata/build --alsologtostderr
functional_test.go:314: (dbg) Done: out/minikube-linux-arm64 -p functional-781779 image build -t localhost/my-image:functional-781779 testdata/build --alsologtostderr: (2.082235826s)
functional_test.go:322: (dbg) Stderr: out/minikube-linux-arm64 -p functional-781779 image build -t localhost/my-image:functional-781779 testdata/build --alsologtostderr:
I0704 01:52:47.866551 1242749 out.go:291] Setting OutFile to fd 1 ...
I0704 01:52:47.867247 1242749 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.867262 1242749 out.go:304] Setting ErrFile to fd 2...
I0704 01:52:47.867268 1242749 out.go:338] TERM=,COLORTERM=, which probably does not support color
I0704 01:52:47.867589 1242749 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
I0704 01:52:47.868265 1242749 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.869006 1242749 config.go:182] Loaded profile config "functional-781779": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
I0704 01:52:47.869542 1242749 cli_runner.go:164] Run: docker container inspect functional-781779 --format={{.State.Status}}
I0704 01:52:47.886764 1242749 ssh_runner.go:195] Run: systemctl --version
I0704 01:52:47.886818 1242749 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-781779
I0704 01:52:47.910576 1242749 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33956 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/functional-781779/id_rsa Username:docker}
I0704 01:52:48.009079 1242749 build_images.go:161] Building image from path: /tmp/build.2374656857.tar
I0704 01:52:48.009180 1242749 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build
I0704 01:52:48.020748 1242749 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/build/build.2374656857.tar
I0704 01:52:48.025693 1242749 ssh_runner.go:352] existence check for /var/lib/minikube/build/build.2374656857.tar: stat -c "%s %y" /var/lib/minikube/build/build.2374656857.tar: Process exited with status 1
stdout:

stderr:
stat: cannot statx '/var/lib/minikube/build/build.2374656857.tar': No such file or directory
I0704 01:52:48.025732 1242749 ssh_runner.go:362] scp /tmp/build.2374656857.tar --> /var/lib/minikube/build/build.2374656857.tar (3072 bytes)
I0704 01:52:48.056106 1242749 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build/build.2374656857
I0704 01:52:48.071291 1242749 ssh_runner.go:195] Run: sudo tar -C /var/lib/minikube/build/build.2374656857 -xf /var/lib/minikube/build/build.2374656857.tar
I0704 01:52:48.083677 1242749 containerd.go:394] Building image: /var/lib/minikube/build/build.2374656857
I0704 01:52:48.083806 1242749 ssh_runner.go:195] Run: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.2374656857 --local dockerfile=/var/lib/minikube/build/build.2374656857 --output type=image,name=localhost/my-image:functional-781779
#1 [internal] load build definition from Dockerfile
#1 transferring dockerfile: 97B done
#1 DONE 0.0s

#2 [internal] load metadata for gcr.io/k8s-minikube/busybox:latest
#2 DONE 0.7s

#3 [internal] load .dockerignore
#3 transferring context: 2B done
#3 DONE 0.0s

#4 [internal] load build context
#4 transferring context: 62B done
#4 DONE 0.0s

#5 [1/3] FROM gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b
#5 resolve gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b 0.0s done
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 828.50kB / 828.50kB 0.2s
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 828.50kB / 828.50kB 0.2s done
#5 extracting sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 0.1s done
#5 DONE 0.3s

#6 [2/3] RUN true
#6 DONE 0.3s

#7 [3/3] ADD content.txt /
#7 DONE 0.0s

#8 exporting to image
#8 exporting layers 0.1s done
#8 exporting manifest sha256:b482a3c7eee1ce96c5c16bd981fd5006b755d704fbe11165f6f16cf339b19fe4
#8 exporting manifest sha256:b482a3c7eee1ce96c5c16bd981fd5006b755d704fbe11165f6f16cf339b19fe4 0.0s done
#8 exporting config sha256:db9692a9b396e46d0950b9149ee46b70230ccd0e6a772069f71c7d9dc4f13cb8 0.0s done
#8 naming to localhost/my-image:functional-781779 done
#8 DONE 0.1s
I0704 01:52:49.873221 1242749 ssh_runner.go:235] Completed: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.2374656857 --local dockerfile=/var/lib/minikube/build/build.2374656857 --output type=image,name=localhost/my-image:functional-781779: (1.789374146s)
I0704 01:52:49.873308 1242749 ssh_runner.go:195] Run: sudo rm -rf /var/lib/minikube/build/build.2374656857
I0704 01:52:49.883999 1242749 ssh_runner.go:195] Run: sudo rm -f /var/lib/minikube/build/build.2374656857.tar
I0704 01:52:49.895114 1242749 build_images.go:217] Built localhost/my-image:functional-781779 from /tmp/build.2374656857.tar
I0704 01:52:49.895188 1242749 build_images.go:133] succeeded building to: functional-781779
I0704 01:52:49.895208 1242749 build_images.go:134] failed building to: 
functional_test.go:447: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageBuild (2.59s)
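
Note: the stderr above shows the build flow: the local context (testdata/build) is packed into a tar, copied to /var/lib/minikube/build/ on the node, untarred, and built with buildctl's dockerfile.v0 frontend. A sketch of the first step only, packing a context directory with archive/tar (output path is illustrative; this is the shape of the archive, not minikube's build_images code):

package main

import (
	"archive/tar"
	"fmt"
	"io"
	"os"
	"path/filepath"
)

// tarDir packs the regular files under dir into a tar stream.
func tarDir(dir string, w io.Writer) error {
	tw := tar.NewWriter(w)
	defer tw.Close()
	return filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		hdr, err := tar.FileInfoHeader(info, "")
		if err != nil {
			return err
		}
		rel, err := filepath.Rel(dir, path)
		if err != nil {
			return err
		}
		hdr.Name = filepath.ToSlash(rel) // paths inside the archive are relative
		if err := tw.WriteHeader(hdr); err != nil {
			return err
		}
		f, err := os.Open(path)
		if err != nil {
			return err
		}
		defer f.Close()
		_, err = io.Copy(tw, f)
		return err
	})
}

func main() {
	out, err := os.Create("/tmp/build-context.tar")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer out.Close()
	if err := tarDir("testdata/build", out); err != nil {
		fmt.Println(err)
	}
}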

TestFunctional/parallel/ImageCommands/Setup (1.87s)

=== RUN   TestFunctional/parallel/ImageCommands/Setup
functional_test.go:341: (dbg) Run:  docker pull gcr.io/google-containers/addon-resizer:1.8.8
functional_test.go:341: (dbg) Done: docker pull gcr.io/google-containers/addon-resizer:1.8.8: (1.8453768s)
functional_test.go:346: (dbg) Run:  docker tag gcr.io/google-containers/addon-resizer:1.8.8 gcr.io/google-containers/addon-resizer:functional-781779
--- PASS: TestFunctional/parallel/ImageCommands/Setup (1.87s)

TestFunctional/parallel/UpdateContextCmd/no_changes (0.15s)

=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes

=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:2115: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (0.15s)

TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.24s)

=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:2115: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.24s)

                                                
                                    
TestFunctional/parallel/UpdateContextCmd/no_clusters (0.16s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:2115: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (0.16s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageRemove (0.45s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageRemove
functional_test.go:391: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image rm gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
functional_test.go:447: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageRemove (0.45s)

                                                
                                    
TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.62s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveDaemon
functional_test.go:418: (dbg) Run:  docker rmi gcr.io/google-containers/addon-resizer:functional-781779
functional_test.go:423: (dbg) Run:  out/minikube-linux-arm64 -p functional-781779 image save --daemon gcr.io/google-containers/addon-resizer:functional-781779 --alsologtostderr
functional_test.go:428: (dbg) Run:  docker image inspect gcr.io/google-containers/addon-resizer:functional-781779
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.62s)

                                                
                                    
TestFunctional/delete_addon-resizer_images (0.08s)

                                                
                                                
=== RUN   TestFunctional/delete_addon-resizer_images
functional_test.go:189: (dbg) Run:  docker rmi -f gcr.io/google-containers/addon-resizer:1.8.8
functional_test.go:189: (dbg) Run:  docker rmi -f gcr.io/google-containers/addon-resizer:functional-781779
--- PASS: TestFunctional/delete_addon-resizer_images (0.08s)

                                                
                                    
TestFunctional/delete_my-image_image (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:197: (dbg) Run:  docker rmi -f localhost/my-image:functional-781779
--- PASS: TestFunctional/delete_my-image_image (0.02s)

                                                
                                    
TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:205: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-781779
--- PASS: TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                    
TestMultiControlPlane/serial/StartCluster (128.15s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StartCluster
ha_test.go:101: (dbg) Run:  out/minikube-linux-arm64 start -p ha-386526 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
E0704 01:53:36.987820 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
ha_test.go:101: (dbg) Done: out/minikube-linux-arm64 start -p ha-386526 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (2m6.924138017s)
ha_test.go:107: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:107: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: (1.22870394s)
--- PASS: TestMultiControlPlane/serial/StartCluster (128.15s)

                                                
                                    
TestMultiControlPlane/serial/DeployApp (17.28s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeployApp
ha_test.go:128: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- apply -f ./testdata/ha/ha-pod-dns-test.yaml
ha_test.go:133: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- rollout status deployment/busybox
ha_test.go:133: (dbg) Done: out/minikube-linux-arm64 kubectl -p ha-386526 -- rollout status deployment/busybox: (14.268842722s)
ha_test.go:140: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- get pods -o jsonpath='{.items[*].status.podIP}'
ha_test.go:163: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-27mp7 -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-9fvgh -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-dqrkr -- nslookup kubernetes.io
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-27mp7 -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-9fvgh -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-dqrkr -- nslookup kubernetes.default
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-27mp7 -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-9fvgh -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-dqrkr -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiControlPlane/serial/DeployApp (17.28s)
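
Note: the DeployApp checks above form a pods-by-names matrix: every busybox replica must resolve kubernetes.io, kubernetes.default, and kubernetes.default.svc.cluster.local. A hedged Go sketch of the same matrix follows; pod names are copied from this run and would differ in a fresh cluster, and a plain `kubectl --context` is used here instead of the harness's `out/minikube-linux-arm64 kubectl -p` wrapper.

package main

import (
	"fmt"
	"log"
	"os/exec"
)

func main() {
	pods := []string{"busybox-fc5497c4f-27mp7", "busybox-fc5497c4f-9fvgh", "busybox-fc5497c4f-dqrkr"}
	names := []string{"kubernetes.io", "kubernetes.default", "kubernetes.default.svc.cluster.local"}
	for _, pod := range pods {
		for _, name := range names {
			// Same check as ha_test.go:171/181/189: nslookup inside each pod.
			out, err := exec.Command("kubectl", "--context", "ha-386526",
				"exec", pod, "--", "nslookup", name).CombinedOutput()
			if err != nil {
				log.Fatalf("%s failed to resolve %s: %v\n%s", pod, name, err, out)
			}
			fmt.Printf("%s resolved %s\n", pod, name)
		}
	}
}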

                                                
                                    
TestMultiControlPlane/serial/PingHostFromPods (1.58s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/PingHostFromPods
ha_test.go:199: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-27mp7 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-27mp7 -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-9fvgh -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-9fvgh -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-dqrkr -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-386526 -- exec busybox-fc5497c4f-dqrkr -- sh -c "ping -c 1 192.168.49.1"
--- PASS: TestMultiControlPlane/serial/PingHostFromPods (1.58s)

                                                
                                    
TestMultiControlPlane/serial/AddWorkerNode (23.67s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddWorkerNode
ha_test.go:228: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-386526 -v=7 --alsologtostderr
ha_test.go:228: (dbg) Done: out/minikube-linux-arm64 node add -p ha-386526 -v=7 --alsologtostderr: (22.659071587s)
ha_test.go:234: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:234: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: (1.013279339s)
--- PASS: TestMultiControlPlane/serial/AddWorkerNode (23.67s)

                                                
                                    
TestMultiControlPlane/serial/NodeLabels (0.13s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/NodeLabels
ha_test.go:255: (dbg) Run:  kubectl --context ha-386526 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiControlPlane/serial/NodeLabels (0.13s)

                                                
                                    
TestMultiControlPlane/serial/HAppyAfterClusterStart (0.76s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterClusterStart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterClusterStart (0.76s)
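
Note: the HAppy* assertions in this suite all shell out to `minikube profile list --output json` and inspect the result. The exact JSON schema is not reproduced in this log, so the sketch below decodes the output generically rather than assuming concrete field names.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os/exec"
)

func main() {
	out, err := exec.Command("minikube", "profile", "list", "--output", "json").Output()
	if err != nil {
		log.Fatal(err)
	}
	// Decode into raw messages; no field names beyond top-level keys are assumed.
	var doc map[string]json.RawMessage
	if err := json.Unmarshal(out, &doc); err != nil {
		log.Fatal(err)
	}
	for key, raw := range doc {
		fmt.Printf("%s: %s\n", key, raw)
	}
}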

                                                
                                    
TestMultiControlPlane/serial/CopyFile (19.8s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/CopyFile
ha_test.go:326: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status --output json -v=7 --alsologtostderr
ha_test.go:326: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 status --output json -v=7 --alsologtostderr: (1.135191448s)
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp testdata/cp-test.txt ha-386526:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3580938532/001/cp-test_ha-386526.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526:/home/docker/cp-test.txt ha-386526-m02:/home/docker/cp-test_ha-386526_ha-386526-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test_ha-386526_ha-386526-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526:/home/docker/cp-test.txt ha-386526-m03:/home/docker/cp-test_ha-386526_ha-386526-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test_ha-386526_ha-386526-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526:/home/docker/cp-test.txt ha-386526-m04:/home/docker/cp-test_ha-386526_ha-386526-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test_ha-386526_ha-386526-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp testdata/cp-test.txt ha-386526-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m02:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3580938532/001/cp-test_ha-386526-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m02:/home/docker/cp-test.txt ha-386526:/home/docker/cp-test_ha-386526-m02_ha-386526.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test_ha-386526-m02_ha-386526.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m02:/home/docker/cp-test.txt ha-386526-m03:/home/docker/cp-test_ha-386526-m02_ha-386526-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test.txt"
E0704 01:55:53.143785 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test_ha-386526-m02_ha-386526-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m02:/home/docker/cp-test.txt ha-386526-m04:/home/docker/cp-test_ha-386526-m02_ha-386526-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test_ha-386526-m02_ha-386526-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp testdata/cp-test.txt ha-386526-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m03:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3580938532/001/cp-test_ha-386526-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m03:/home/docker/cp-test.txt ha-386526:/home/docker/cp-test_ha-386526-m03_ha-386526.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test_ha-386526-m03_ha-386526.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m03:/home/docker/cp-test.txt ha-386526-m02:/home/docker/cp-test_ha-386526-m03_ha-386526-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test_ha-386526-m03_ha-386526-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m03:/home/docker/cp-test.txt ha-386526-m04:/home/docker/cp-test_ha-386526-m03_ha-386526-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test_ha-386526-m03_ha-386526-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp testdata/cp-test.txt ha-386526-m04:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m04:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3580938532/001/cp-test_ha-386526-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m04:/home/docker/cp-test.txt ha-386526:/home/docker/cp-test_ha-386526-m04_ha-386526.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526 "sudo cat /home/docker/cp-test_ha-386526-m04_ha-386526.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m04:/home/docker/cp-test.txt ha-386526-m02:/home/docker/cp-test_ha-386526-m04_ha-386526-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m02 "sudo cat /home/docker/cp-test_ha-386526-m04_ha-386526-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 cp ha-386526-m04:/home/docker/cp-test.txt ha-386526-m03:/home/docker/cp-test_ha-386526-m04_ha-386526-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 ssh -n ha-386526-m03 "sudo cat /home/docker/cp-test_ha-386526-m04_ha-386526-m03.txt"
--- PASS: TestMultiControlPlane/serial/CopyFile (19.80s)
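
Note: every cp step in the matrix above is verified by an ssh cat on the destination node. A Go sketch of one such round trip follows (profile, node names, and paths are taken from the log; the expected file contents are not shown there, so only non-emptiness is checked).

package main

import (
	"bytes"
	"log"
	"os/exec"
)

func main() {
	// Copy a file from the primary node to node m02, as in the matrix above.
	cp := exec.Command("minikube", "-p", "ha-386526", "cp",
		"ha-386526:/home/docker/cp-test.txt",
		"ha-386526-m02:/home/docker/cp-test_ha-386526_ha-386526-m02.txt")
	if out, err := cp.CombinedOutput(); err != nil {
		log.Fatalf("cp failed: %v\n%s", err, out)
	}

	// Verify by reading the file back over ssh, mirroring helpers_test.go:534.
	cat := exec.Command("minikube", "-p", "ha-386526", "ssh", "-n", "ha-386526-m02",
		"sudo cat /home/docker/cp-test_ha-386526_ha-386526-m02.txt")
	got, err := cat.Output()
	if err != nil {
		log.Fatal(err)
	}
	if len(bytes.TrimSpace(got)) == 0 {
		log.Fatal("destination file is empty")
	}
}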

                                                
                                    
TestMultiControlPlane/serial/StopSecondaryNode (12.91s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopSecondaryNode
ha_test.go:363: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 node stop m02 -v=7 --alsologtostderr
ha_test.go:363: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 node stop m02 -v=7 --alsologtostderr: (12.132584724s)
ha_test.go:369: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:369: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: exit status 7 (780.382217ms)

                                                
                                                
-- stdout --
	ha-386526
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-386526-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-386526-m03
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-386526-m04
	type: Worker
	host: Running
	kubelet: Running
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0704 01:56:16.469228 1259140 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:56:16.469410 1259140 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:56:16.469423 1259140 out.go:304] Setting ErrFile to fd 2...
	I0704 01:56:16.469429 1259140 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:56:16.469717 1259140 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:56:16.469939 1259140 out.go:298] Setting JSON to false
	I0704 01:56:16.469978 1259140 mustload.go:65] Loading cluster: ha-386526
	I0704 01:56:16.470061 1259140 notify.go:220] Checking for updates...
	I0704 01:56:16.470430 1259140 config.go:182] Loaded profile config "ha-386526": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:56:16.470450 1259140 status.go:255] checking status of ha-386526 ...
	I0704 01:56:16.471305 1259140 cli_runner.go:164] Run: docker container inspect ha-386526 --format={{.State.Status}}
	I0704 01:56:16.491054 1259140 status.go:330] ha-386526 host status = "Running" (err=<nil>)
	I0704 01:56:16.491083 1259140 host.go:66] Checking if "ha-386526" exists ...
	I0704 01:56:16.491413 1259140 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-386526
	I0704 01:56:16.513420 1259140 host.go:66] Checking if "ha-386526" exists ...
	I0704 01:56:16.513740 1259140 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:56:16.513795 1259140 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-386526
	I0704 01:56:16.536089 1259140 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33961 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/ha-386526/id_rsa Username:docker}
	I0704 01:56:16.633486 1259140 ssh_runner.go:195] Run: systemctl --version
	I0704 01:56:16.639111 1259140 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:56:16.653086 1259140 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 01:56:16.745138 1259140 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:4 ContainersRunning:3 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:55 OomKillDisable:true NGoroutines:71 SystemTime:2024-07-04 01:56:16.733917922 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 01:56:16.745925 1259140 kubeconfig.go:125] found "ha-386526" server: "https://192.168.49.254:8443"
	I0704 01:56:16.745952 1259140 api_server.go:166] Checking apiserver status ...
	I0704 01:56:16.746005 1259140 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 01:56:16.760602 1259140 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1514/cgroup
	I0704 01:56:16.771162 1259140 api_server.go:182] apiserver freezer: "8:freezer:/docker/337acf08ce6e8fb9cb96207d05b6cf3e31a516435ff5825cf2a17bce243eebe8/kubepods/burstable/pod7d0f9c6e6f1db3c968ad55540610dd8c/8d060e43888f4723b0c4bbb86cd7503f59c716e2581ccd00b561ccf35efc3ae1"
	I0704 01:56:16.771247 1259140 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/337acf08ce6e8fb9cb96207d05b6cf3e31a516435ff5825cf2a17bce243eebe8/kubepods/burstable/pod7d0f9c6e6f1db3c968ad55540610dd8c/8d060e43888f4723b0c4bbb86cd7503f59c716e2581ccd00b561ccf35efc3ae1/freezer.state
	I0704 01:56:16.781021 1259140 api_server.go:204] freezer state: "THAWED"
	I0704 01:56:16.781051 1259140 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0704 01:56:16.790500 1259140 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0704 01:56:16.790532 1259140 status.go:422] ha-386526 apiserver status = Running (err=<nil>)
	I0704 01:56:16.790544 1259140 status.go:257] ha-386526 status: &{Name:ha-386526 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 01:56:16.790561 1259140 status.go:255] checking status of ha-386526-m02 ...
	I0704 01:56:16.790885 1259140 cli_runner.go:164] Run: docker container inspect ha-386526-m02 --format={{.State.Status}}
	I0704 01:56:16.809279 1259140 status.go:330] ha-386526-m02 host status = "Stopped" (err=<nil>)
	I0704 01:56:16.809304 1259140 status.go:343] host is not running, skipping remaining checks
	I0704 01:56:16.809313 1259140 status.go:257] ha-386526-m02 status: &{Name:ha-386526-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 01:56:16.809334 1259140 status.go:255] checking status of ha-386526-m03 ...
	I0704 01:56:16.809662 1259140 cli_runner.go:164] Run: docker container inspect ha-386526-m03 --format={{.State.Status}}
	I0704 01:56:16.831994 1259140 status.go:330] ha-386526-m03 host status = "Running" (err=<nil>)
	I0704 01:56:16.832022 1259140 host.go:66] Checking if "ha-386526-m03" exists ...
	I0704 01:56:16.832362 1259140 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-386526-m03
	I0704 01:56:16.850514 1259140 host.go:66] Checking if "ha-386526-m03" exists ...
	I0704 01:56:16.850896 1259140 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:56:16.850957 1259140 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-386526-m03
	I0704 01:56:16.871554 1259140 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33971 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/ha-386526-m03/id_rsa Username:docker}
	I0704 01:56:16.976605 1259140 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:56:16.989365 1259140 kubeconfig.go:125] found "ha-386526" server: "https://192.168.49.254:8443"
	I0704 01:56:16.989398 1259140 api_server.go:166] Checking apiserver status ...
	I0704 01:56:16.989444 1259140 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 01:56:17.001212 1259140 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1397/cgroup
	I0704 01:56:17.015391 1259140 api_server.go:182] apiserver freezer: "8:freezer:/docker/5415fe38056ddeef90b9f2810a5c66cc131d3644cfb49d7bf5cdb3852f89d8ac/kubepods/burstable/pod188e1df0d39df5632fffb266ac3be5cf/c82294853493b3f56aea7a6499e7f2e43d662e533418e7ea5cb5d7708f2a1a02"
	I0704 01:56:17.015545 1259140 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/5415fe38056ddeef90b9f2810a5c66cc131d3644cfb49d7bf5cdb3852f89d8ac/kubepods/burstable/pod188e1df0d39df5632fffb266ac3be5cf/c82294853493b3f56aea7a6499e7f2e43d662e533418e7ea5cb5d7708f2a1a02/freezer.state
	I0704 01:56:17.025094 1259140 api_server.go:204] freezer state: "THAWED"
	I0704 01:56:17.025121 1259140 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0704 01:56:17.033136 1259140 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0704 01:56:17.033169 1259140 status.go:422] ha-386526-m03 apiserver status = Running (err=<nil>)
	I0704 01:56:17.033179 1259140 status.go:257] ha-386526-m03 status: &{Name:ha-386526-m03 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 01:56:17.033221 1259140 status.go:255] checking status of ha-386526-m04 ...
	I0704 01:56:17.033543 1259140 cli_runner.go:164] Run: docker container inspect ha-386526-m04 --format={{.State.Status}}
	I0704 01:56:17.050708 1259140 status.go:330] ha-386526-m04 host status = "Running" (err=<nil>)
	I0704 01:56:17.050735 1259140 host.go:66] Checking if "ha-386526-m04" exists ...
	I0704 01:56:17.051055 1259140 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-386526-m04
	I0704 01:56:17.069487 1259140 host.go:66] Checking if "ha-386526-m04" exists ...
	I0704 01:56:17.069816 1259140 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 01:56:17.069861 1259140 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-386526-m04
	I0704 01:56:17.086388 1259140 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33976 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/ha-386526-m04/id_rsa Username:docker}
	I0704 01:56:17.181278 1259140 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 01:56:17.192711 1259140 status.go:257] ha-386526-m04 status: &{Name:ha-386526-m04 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiControlPlane/serial/StopSecondaryNode (12.91s)
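
Note: the status checks in the stderr block above end with an HTTPS probe of the apiserver's /healthz endpoint ("returned 200: ok"). A standalone Go sketch of that probe follows; the VIP address comes from the log, and certificate verification is skipped here purely for illustration, since the probe targets a cluster-internal endpoint whose CA is not configured in this sketch.

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // illustration only
	}}
	resp, err := client.Get("https://192.168.49.254:8443/healthz")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("%s returned %d: %s\n", resp.Request.URL, resp.StatusCode, body)
}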

                                                
                                    
TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.58s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.58s)

                                                
                                    
TestMultiControlPlane/serial/RestartSecondaryNode (19.5s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartSecondaryNode
ha_test.go:420: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 node start m02 -v=7 --alsologtostderr
E0704 01:56:20.830218 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
ha_test.go:420: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 node start m02 -v=7 --alsologtostderr: (18.317091234s)
ha_test.go:428: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:428: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: (1.083998569s)
ha_test.go:448: (dbg) Run:  kubectl get nodes
--- PASS: TestMultiControlPlane/serial/RestartSecondaryNode (19.50s)

                                                
                                    
TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (0.78s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (0.78s)

                                                
                                    
TestMultiControlPlane/serial/RestartClusterKeepsNodes (143.52s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartClusterKeepsNodes
ha_test.go:456: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-386526 -v=7 --alsologtostderr
ha_test.go:462: (dbg) Run:  out/minikube-linux-arm64 stop -p ha-386526 -v=7 --alsologtostderr
E0704 01:56:51.649163 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.654403 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.664760 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.685075 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.725492 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.805788 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:51.966141 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:52.286680 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:52.927280 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:54.207597 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:56:56.768728 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:57:01.889884 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:57:12.130834 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
ha_test.go:462: (dbg) Done: out/minikube-linux-arm64 stop -p ha-386526 -v=7 --alsologtostderr: (37.245575467s)
ha_test.go:467: (dbg) Run:  out/minikube-linux-arm64 start -p ha-386526 --wait=true -v=7 --alsologtostderr
E0704 01:57:32.611589 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 01:58:13.572614 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
ha_test.go:467: (dbg) Done: out/minikube-linux-arm64 start -p ha-386526 --wait=true -v=7 --alsologtostderr: (1m46.134249027s)
ha_test.go:472: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-386526
--- PASS: TestMultiControlPlane/serial/RestartClusterKeepsNodes (143.52s)

                                                
                                    
TestMultiControlPlane/serial/DeleteSecondaryNode (11.66s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeleteSecondaryNode
ha_test.go:487: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 node delete m03 -v=7 --alsologtostderr
ha_test.go:487: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 node delete m03 -v=7 --alsologtostderr: (10.736806158s)
ha_test.go:493: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:511: (dbg) Run:  kubectl get nodes
ha_test.go:519: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiControlPlane/serial/DeleteSecondaryNode (11.66s)

                                                
                                    
TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.55s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.55s)

                                                
                                    
TestMultiControlPlane/serial/StopCluster (36.05s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopCluster
ha_test.go:531: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 stop -v=7 --alsologtostderr
E0704 01:59:35.492833 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
ha_test.go:531: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 stop -v=7 --alsologtostderr: (35.944064574s)
ha_test.go:537: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:537: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: exit status 7 (106.492635ms)

                                                
                                                
-- stdout --
	ha-386526
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-386526-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-386526-m04
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0704 01:59:49.789082 1273534 out.go:291] Setting OutFile to fd 1 ...
	I0704 01:59:49.789296 1273534 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:59:49.789327 1273534 out.go:304] Setting ErrFile to fd 2...
	I0704 01:59:49.789348 1273534 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 01:59:49.789615 1273534 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 01:59:49.789817 1273534 out.go:298] Setting JSON to false
	I0704 01:59:49.789872 1273534 mustload.go:65] Loading cluster: ha-386526
	I0704 01:59:49.789913 1273534 notify.go:220] Checking for updates...
	I0704 01:59:49.790325 1273534 config.go:182] Loaded profile config "ha-386526": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 01:59:49.790358 1273534 status.go:255] checking status of ha-386526 ...
	I0704 01:59:49.790886 1273534 cli_runner.go:164] Run: docker container inspect ha-386526 --format={{.State.Status}}
	I0704 01:59:49.807271 1273534 status.go:330] ha-386526 host status = "Stopped" (err=<nil>)
	I0704 01:59:49.807290 1273534 status.go:343] host is not running, skipping remaining checks
	I0704 01:59:49.807298 1273534 status.go:257] ha-386526 status: &{Name:ha-386526 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 01:59:49.807328 1273534 status.go:255] checking status of ha-386526-m02 ...
	I0704 01:59:49.807747 1273534 cli_runner.go:164] Run: docker container inspect ha-386526-m02 --format={{.State.Status}}
	I0704 01:59:49.826911 1273534 status.go:330] ha-386526-m02 host status = "Stopped" (err=<nil>)
	I0704 01:59:49.826938 1273534 status.go:343] host is not running, skipping remaining checks
	I0704 01:59:49.826946 1273534 status.go:257] ha-386526-m02 status: &{Name:ha-386526-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 01:59:49.826967 1273534 status.go:255] checking status of ha-386526-m04 ...
	I0704 01:59:49.827272 1273534 cli_runner.go:164] Run: docker container inspect ha-386526-m04 --format={{.State.Status}}
	I0704 01:59:49.847565 1273534 status.go:330] ha-386526-m04 host status = "Stopped" (err=<nil>)
	I0704 01:59:49.847591 1273534 status.go:343] host is not running, skipping remaining checks
	I0704 01:59:49.847599 1273534 status.go:257] ha-386526-m04 status: &{Name:ha-386526-m04 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiControlPlane/serial/StopCluster (36.05s)
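
Note: both here and in StopSecondaryNode above, `minikube status` exits 7 once any node is stopped while still printing the per-node table. A Go sketch that distinguishes that case from a hard failure follows; it treats exit code 7 only as "a status was printed with at least one stopped component", which is what this log shows, and infers nothing about other exit codes.

package main

import (
	"errors"
	"fmt"
	"log"
	"os/exec"
)

func main() {
	out, err := exec.Command("minikube", "-p", "ha-386526", "status").Output()
	fmt.Print(string(out)) // the per-node table is printed either way
	var exitErr *exec.ExitError
	switch {
	case err == nil:
		fmt.Println("all nodes running")
	case errors.As(err, &exitErr) && exitErr.ExitCode() == 7:
		fmt.Println("status reported, but at least one component is stopped")
	default:
		log.Fatalf("status failed outright: %v", err)
	}
}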

                                                
                                    
TestMultiControlPlane/serial/RestartCluster (79.85s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartCluster
ha_test.go:560: (dbg) Run:  out/minikube-linux-arm64 start -p ha-386526 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
E0704 02:00:53.143546 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
ha_test.go:560: (dbg) Done: out/minikube-linux-arm64 start -p ha-386526 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m18.890046792s)
ha_test.go:566: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:584: (dbg) Run:  kubectl get nodes
ha_test.go:592: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiControlPlane/serial/RestartCluster (79.85s)

                                                
                                    
TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.6s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterClusterRestart
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.60s)

                                                
                                    
TestMultiControlPlane/serial/AddSecondaryNode (42.78s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddSecondaryNode
ha_test.go:605: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-386526 --control-plane -v=7 --alsologtostderr
E0704 02:01:51.649452 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
ha_test.go:605: (dbg) Done: out/minikube-linux-arm64 node add -p ha-386526 --control-plane -v=7 --alsologtostderr: (41.726655209s)
ha_test.go:611: (dbg) Run:  out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr
ha_test.go:611: (dbg) Done: out/minikube-linux-arm64 -p ha-386526 status -v=7 --alsologtostderr: (1.056278683s)
--- PASS: TestMultiControlPlane/serial/AddSecondaryNode (42.78s)

                                                
                                    
TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.8s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.80s)

                                                
                                    
TestJSONOutput/start/Command (81.82s)

                                                
                                                
=== RUN   TestJSONOutput/start/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-636962 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd
E0704 02:02:19.333105 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 start -p json-output-636962 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd: (1m21.813577377s)
--- PASS: TestJSONOutput/start/Command (81.82s)

                                                
                                    
TestJSONOutput/start/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

                                                
                                    
TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/pause/Command (0.74s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 pause -p json-output-636962 --output=json --user=testUser
--- PASS: TestJSONOutput/pause/Command (0.74s)

                                                
                                    
TestJSONOutput/pause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

                                                
                                    
TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/unpause/Command (0.68s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 unpause -p json-output-636962 --output=json --user=testUser
--- PASS: TestJSONOutput/unpause/Command (0.68s)

                                                
                                    
TestJSONOutput/unpause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

                                                
                                    
TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/stop/Command (5.79s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 stop -p json-output-636962 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 stop -p json-output-636962 --output=json --user=testUser: (5.786889059s)
--- PASS: TestJSONOutput/stop/Command (5.79s)

                                                
                                    
TestJSONOutput/stop/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

                                                
                                    
TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
TestErrorJSONOutput (0.22s)

                                                
                                                
=== RUN   TestErrorJSONOutput
json_output_test.go:160: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-error-680995 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:160: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p json-output-error-680995 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (76.757739ms)

                                                
                                                
-- stdout --
	{"specversion":"1.0","id":"81616418-f5f9-49e2-8908-2da2b92672a8","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[json-output-error-680995] minikube v1.33.1 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"27380ca2-168d-4efd-a910-3b4b76adc7a4","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=18859"}}
	{"specversion":"1.0","id":"cbee8581-c56a-4075-ac6f-99ac4788887e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"892f927b-522f-42ea-b5e4-a0b44df154ac","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig"}}
	{"specversion":"1.0","id":"7a4fd47f-24f0-4f90-bd8e-f8177b72faf8","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube"}}
	{"specversion":"1.0","id":"d998db2a-4ba4-4bee-87ea-017b0de887d9","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"c3667192-8d1e-4143-93bb-f769d687bd3c","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"dfeffdfb-a0a3-4b4d-9904-3d4796e20082","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on linux/arm64","name":"DRV_UNSUPPORTED_OS","url":""}}
-- /stdout --
helpers_test.go:175: Cleaning up "json-output-error-680995" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p json-output-error-680995
--- PASS: TestErrorJSONOutput (0.22s)
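Each line emitted under --output=json is a self-contained CloudEvents-style JSON object (specversion, id, source, type, datacontenttype, and a data payload), which is the contract the TestJSONOutput and TestErrorJSONOutput cases above exercise. A minimal consumer sketch in Go, assuming only the fields visible in this log; the event struct is a hypothetical reader-side type, not a type exported by minikube:

	package main

	import (
		"bufio"
		"encoding/json"
		"fmt"
		"os"
	)

	// event mirrors only the fields visible in the log above; it is a
	// reader-side convenience type, not a minikube API.
	type event struct {
		Type string            `json:"type"` // e.g. "io.k8s.sigs.minikube.error"
		Data map[string]string `json:"data"` // message, name, exitcode, ...
	}

	func main() {
		// e.g. fed from: out/minikube-linux-arm64 start -p ... --output=json
		sc := bufio.NewScanner(os.Stdin)
		for sc.Scan() {
			var e event
			if err := json.Unmarshal(sc.Bytes(), &e); err != nil {
				continue // tolerate non-JSON lines
			}
			if e.Type == "io.k8s.sigs.minikube.error" {
				fmt.Printf("error %s (exit %s): %s\n",
					e.Data["name"], e.Data["exitcode"], e.Data["message"])
			}
		}
	}

Fed the DRV_UNSUPPORTED_OS event from the stdout block above, this would print: error DRV_UNSUPPORTED_OS (exit 56): The driver 'fail' is not supported on linux/arm64.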
TestKicCustomNetwork/create_custom_network (39.18s)
=== RUN   TestKicCustomNetwork/create_custom_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-800430 --network=
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-800430 --network=: (37.118271431s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-800430" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-800430
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-800430: (2.030284529s)
--- PASS: TestKicCustomNetwork/create_custom_network (39.18s)

TestKicCustomNetwork/use_default_bridge_network (36.45s)
=== RUN   TestKicCustomNetwork/use_default_bridge_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-136831 --network=bridge
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-136831 --network=bridge: (34.379621006s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-136831" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-136831
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-136831: (2.043307098s)
--- PASS: TestKicCustomNetwork/use_default_bridge_network (36.45s)

TestKicExistingNetwork (36.23s)
=== RUN   TestKicExistingNetwork
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
kic_custom_network_test.go:93: (dbg) Run:  out/minikube-linux-arm64 start -p existing-network-406447 --network=existing-network
kic_custom_network_test.go:93: (dbg) Done: out/minikube-linux-arm64 start -p existing-network-406447 --network=existing-network: (33.98816987s)
helpers_test.go:175: Cleaning up "existing-network-406447" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p existing-network-406447
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p existing-network-406447: (2.081095219s)
--- PASS: TestKicExistingNetwork (36.23s)

TestKicCustomSubnet (33.66s)
=== RUN   TestKicCustomSubnet
kic_custom_network_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p custom-subnet-996134 --subnet=192.168.60.0/24
E0704 02:05:53.144483 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
kic_custom_network_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p custom-subnet-996134 --subnet=192.168.60.0/24: (31.416570383s)
kic_custom_network_test.go:161: (dbg) Run:  docker network inspect custom-subnet-996134 --format "{{(index .IPAM.Config 0).Subnet}}"
helpers_test.go:175: Cleaning up "custom-subnet-996134" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p custom-subnet-996134
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p custom-subnet-996134: (2.217747629s)
--- PASS: TestKicCustomSubnet (33.66s)
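The verification step above reads the pool back with docker network inspect and a Go template. A standalone sketch of the same check, assuming a docker CLI on PATH; the network name and expected subnet are the ones used in this run:

	package main

	import (
		"fmt"
		"os/exec"
		"strings"
	)

	func main() {
		// Same inspection the test runs after `start --subnet=192.168.60.0/24`.
		out, err := exec.Command("docker", "network", "inspect", "custom-subnet-996134",
			"--format", "{{(index .IPAM.Config 0).Subnet}}").Output()
		if err != nil {
			fmt.Println("inspect failed:", err)
			return
		}
		if got := strings.TrimSpace(string(out)); got != "192.168.60.0/24" {
			fmt.Println("unexpected subnet:", got)
		} else {
			fmt.Println("subnet matches:", got)
		}
	}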
TestKicStaticIP (34.45s)
=== RUN   TestKicStaticIP
kic_custom_network_test.go:132: (dbg) Run:  out/minikube-linux-arm64 start -p static-ip-433467 --static-ip=192.168.200.200
kic_custom_network_test.go:132: (dbg) Done: out/minikube-linux-arm64 start -p static-ip-433467 --static-ip=192.168.200.200: (32.242815707s)
kic_custom_network_test.go:138: (dbg) Run:  out/minikube-linux-arm64 -p static-ip-433467 ip
helpers_test.go:175: Cleaning up "static-ip-433467" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p static-ip-433467
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p static-ip-433467: (2.046511358s)
--- PASS: TestKicStaticIP (34.45s)

TestMainNoArgs (0.06s)
=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-linux-arm64
--- PASS: TestMainNoArgs (0.06s)

TestMinikubeProfile (63.83s)
=== RUN   TestMinikubeProfile
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p first-781507 --driver=docker  --container-runtime=containerd
E0704 02:06:51.649210 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p first-781507 --driver=docker  --container-runtime=containerd: (29.888558932s)
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p second-787964 --driver=docker  --container-runtime=containerd
E0704 02:07:16.191963 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p second-787964 --driver=docker  --container-runtime=containerd: (28.787473071s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile first-781507
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile second-787964
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
helpers_test.go:175: Cleaning up "second-787964" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p second-787964
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p second-787964: (1.955709789s)
helpers_test.go:175: Cleaning up "first-781507" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p first-781507
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p first-781507: (1.983089026s)
--- PASS: TestMinikubeProfile (63.83s)

TestMountStart/serial/StartWithMountFirst (6.26s)
=== RUN   TestMountStart/serial/StartWithMountFirst
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-1-345513 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-1-345513 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (5.26082063s)
--- PASS: TestMountStart/serial/StartWithMountFirst (6.26s)

TestMountStart/serial/VerifyMountFirst (0.28s)
=== RUN   TestMountStart/serial/VerifyMountFirst
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-1-345513 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountFirst (0.28s)

TestMountStart/serial/StartWithMountSecond (6.66s)
=== RUN   TestMountStart/serial/StartWithMountSecond
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-358945 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-358945 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (5.654741107s)
--- PASS: TestMountStart/serial/StartWithMountSecond (6.66s)

TestMountStart/serial/VerifyMountSecond (0.26s)
=== RUN   TestMountStart/serial/VerifyMountSecond
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-358945 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountSecond (0.26s)

TestMountStart/serial/DeleteFirst (1.64s)
=== RUN   TestMountStart/serial/DeleteFirst
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p mount-start-1-345513 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p mount-start-1-345513 --alsologtostderr -v=5: (1.637650804s)
--- PASS: TestMountStart/serial/DeleteFirst (1.64s)

TestMountStart/serial/VerifyMountPostDelete (0.25s)
=== RUN   TestMountStart/serial/VerifyMountPostDelete
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-358945 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostDelete (0.25s)

TestMountStart/serial/Stop (1.21s)
=== RUN   TestMountStart/serial/Stop
mount_start_test.go:155: (dbg) Run:  out/minikube-linux-arm64 stop -p mount-start-2-358945
mount_start_test.go:155: (dbg) Done: out/minikube-linux-arm64 stop -p mount-start-2-358945: (1.208898567s)
--- PASS: TestMountStart/serial/Stop (1.21s)

TestMountStart/serial/RestartStopped (7.78s)
=== RUN   TestMountStart/serial/RestartStopped
mount_start_test.go:166: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-358945
mount_start_test.go:166: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-358945: (6.780029103s)
--- PASS: TestMountStart/serial/RestartStopped (7.78s)

TestMountStart/serial/VerifyMountPostStop (0.28s)
=== RUN   TestMountStart/serial/VerifyMountPostStop
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-358945 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostStop (0.28s)

TestMultiNode/serial/FreshStart2Nodes (73.49s)
=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:96: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-769159 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:96: (dbg) Done: out/minikube-linux-arm64 start -p multinode-769159 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m12.973721991s)
multinode_test.go:102: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
--- PASS: TestMultiNode/serial/FreshStart2Nodes (73.49s)

TestMultiNode/serial/DeployApp2Nodes (4.3s)
=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:493: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:498: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- rollout status deployment/busybox
multinode_test.go:498: (dbg) Done: out/minikube-linux-arm64 kubectl -p multinode-769159 -- rollout status deployment/busybox: (2.421335799s)
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:528: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-gpksl -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-xzddp -- nslookup kubernetes.io
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-gpksl -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-xzddp -- nslookup kubernetes.default
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-gpksl -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-xzddp -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiNode/serial/DeployApp2Nodes (4.30s)

TestMultiNode/serial/PingHostFrom2Pods (0.94s)
=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:564: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-gpksl -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-gpksl -- sh -c "ping -c 1 192.168.67.1"
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-xzddp -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-769159 -- exec busybox-fc5497c4f-xzddp -- sh -c "ping -c 1 192.168.67.1"
--- PASS: TestMultiNode/serial/PingHostFrom2Pods (0.94s)
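The pipeline above (nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3) assumes the resolved address sits in the third space-separated field of the fifth line of busybox nslookup output. The same extraction in Go, as a sketch; the sample output is shaped to match that assumed layout rather than captured from this run:

	package main

	import (
		"fmt"
		"strings"
	)

	// hostIP mimics `awk 'NR==5' | cut -d' ' -f3`: take line 5, then field 3.
	// Like cut, it splits on single spaces and keeps empty fields.
	func hostIP(out string) string {
		lines := strings.Split(out, "\n")
		if len(lines) < 5 {
			return ""
		}
		fields := strings.Split(lines[4], " ")
		if len(fields) < 3 {
			return ""
		}
		return fields[2]
	}

	func main() {
		// Illustrative layout only, matching the pipeline's assumptions.
		sample := "Server:    10.96.0.10\n" +
			"Address 1: 10.96.0.10\n" +
			"\n" +
			"Name:      host.minikube.internal\n" +
			"Address 1: 192.168.67.1\n"
		fmt.Println(hostIP(sample)) // 192.168.67.1, the gateway pinged above
	}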
TestMultiNode/serial/AddNode (16.26s)
=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:121: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-769159 -v 3 --alsologtostderr
multinode_test.go:121: (dbg) Done: out/minikube-linux-arm64 node add -p multinode-769159 -v 3 --alsologtostderr: (15.554929703s)
multinode_test.go:127: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
--- PASS: TestMultiNode/serial/AddNode (16.26s)

TestMultiNode/serial/MultiNodeLabels (0.1s)
=== RUN   TestMultiNode/serial/MultiNodeLabels
multinode_test.go:221: (dbg) Run:  kubectl --context multinode-769159 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiNode/serial/MultiNodeLabels (0.10s)

TestMultiNode/serial/ProfileList (0.36s)
=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:143: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiNode/serial/ProfileList (0.36s)

TestMultiNode/serial/CopyFile (10.43s)
=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:184: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --output json --alsologtostderr
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp testdata/cp-test.txt multinode-769159:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2758996403/001/cp-test_multinode-769159.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159:/home/docker/cp-test.txt multinode-769159-m02:/home/docker/cp-test_multinode-769159_multinode-769159-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test_multinode-769159_multinode-769159-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159:/home/docker/cp-test.txt multinode-769159-m03:/home/docker/cp-test_multinode-769159_multinode-769159-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test_multinode-769159_multinode-769159-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp testdata/cp-test.txt multinode-769159-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m02:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2758996403/001/cp-test_multinode-769159-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m02:/home/docker/cp-test.txt multinode-769159:/home/docker/cp-test_multinode-769159-m02_multinode-769159.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test_multinode-769159-m02_multinode-769159.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m02:/home/docker/cp-test.txt multinode-769159-m03:/home/docker/cp-test_multinode-769159-m02_multinode-769159-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test_multinode-769159-m02_multinode-769159-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp testdata/cp-test.txt multinode-769159-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m03:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2758996403/001/cp-test_multinode-769159-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m03:/home/docker/cp-test.txt multinode-769159:/home/docker/cp-test_multinode-769159-m03_multinode-769159.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159 "sudo cat /home/docker/cp-test_multinode-769159-m03_multinode-769159.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 cp multinode-769159-m03:/home/docker/cp-test.txt multinode-769159-m02:/home/docker/cp-test_multinode-769159-m03_multinode-769159-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 ssh -n multinode-769159-m02 "sudo cat /home/docker/cp-test_multinode-769159-m03_multinode-769159-m02.txt"
--- PASS: TestMultiNode/serial/CopyFile (10.43s)

TestMultiNode/serial/StopNode (2.23s)
=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:248: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 node stop m03
multinode_test.go:248: (dbg) Done: out/minikube-linux-arm64 -p multinode-769159 node stop m03: (1.214140687s)
multinode_test.go:254: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status
multinode_test.go:254: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-769159 status: exit status 7 (502.731424ms)
-- stdout --
	multinode-769159
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-769159-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-769159-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
multinode_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
multinode_test.go:261: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr: exit status 7 (512.906823ms)
-- stdout --
	multinode-769159
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-769159-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-769159-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
** stderr ** 
	I0704 02:09:53.097581 1327535 out.go:291] Setting OutFile to fd 1 ...
	I0704 02:09:53.097759 1327535 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:09:53.097790 1327535 out.go:304] Setting ErrFile to fd 2...
	I0704 02:09:53.097811 1327535 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:09:53.098062 1327535 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 02:09:53.098290 1327535 out.go:298] Setting JSON to false
	I0704 02:09:53.098349 1327535 mustload.go:65] Loading cluster: multinode-769159
	I0704 02:09:53.098445 1327535 notify.go:220] Checking for updates...
	I0704 02:09:53.098874 1327535 config.go:182] Loaded profile config "multinode-769159": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:09:53.098910 1327535 status.go:255] checking status of multinode-769159 ...
	I0704 02:09:53.099420 1327535 cli_runner.go:164] Run: docker container inspect multinode-769159 --format={{.State.Status}}
	I0704 02:09:53.118049 1327535 status.go:330] multinode-769159 host status = "Running" (err=<nil>)
	I0704 02:09:53.118076 1327535 host.go:66] Checking if "multinode-769159" exists ...
	I0704 02:09:53.118393 1327535 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-769159
	I0704 02:09:53.142814 1327535 host.go:66] Checking if "multinode-769159" exists ...
	I0704 02:09:53.143132 1327535 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 02:09:53.143250 1327535 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-769159
	I0704 02:09:53.163430 1327535 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34081 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/multinode-769159/id_rsa Username:docker}
	I0704 02:09:53.264781 1327535 ssh_runner.go:195] Run: systemctl --version
	I0704 02:09:53.269211 1327535 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 02:09:53.282254 1327535 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:09:53.348232 1327535 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:3 ContainersRunning:2 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:40 OomKillDisable:true NGoroutines:61 SystemTime:2024-07-04 02:09:53.338387702 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:09:53.348833 1327535 kubeconfig.go:125] found "multinode-769159" server: "https://192.168.67.2:8443"
	I0704 02:09:53.348865 1327535 api_server.go:166] Checking apiserver status ...
	I0704 02:09:53.348911 1327535 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0704 02:09:53.360035 1327535 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1451/cgroup
	I0704 02:09:53.369540 1327535 api_server.go:182] apiserver freezer: "8:freezer:/docker/33668b0c1b2aac2fb4ecb86251e1c5adc1af53052b61ccdb986b390764814519/kubepods/burstable/pod993a40bae135615b2a1115b1388e1543/74d6d3acaa896c67d872d1ee758fff817972788eb486dca46035876c29082b63"
	I0704 02:09:53.369649 1327535 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/33668b0c1b2aac2fb4ecb86251e1c5adc1af53052b61ccdb986b390764814519/kubepods/burstable/pod993a40bae135615b2a1115b1388e1543/74d6d3acaa896c67d872d1ee758fff817972788eb486dca46035876c29082b63/freezer.state
	I0704 02:09:53.378660 1327535 api_server.go:204] freezer state: "THAWED"
	I0704 02:09:53.378688 1327535 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0704 02:09:53.386537 1327535 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
	I0704 02:09:53.386571 1327535 status.go:422] multinode-769159 apiserver status = Running (err=<nil>)
	I0704 02:09:53.386582 1327535 status.go:257] multinode-769159 status: &{Name:multinode-769159 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 02:09:53.386599 1327535 status.go:255] checking status of multinode-769159-m02 ...
	I0704 02:09:53.386939 1327535 cli_runner.go:164] Run: docker container inspect multinode-769159-m02 --format={{.State.Status}}
	I0704 02:09:53.403188 1327535 status.go:330] multinode-769159-m02 host status = "Running" (err=<nil>)
	I0704 02:09:53.403211 1327535 host.go:66] Checking if "multinode-769159-m02" exists ...
	I0704 02:09:53.403556 1327535 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-769159-m02
	I0704 02:09:53.420060 1327535 host.go:66] Checking if "multinode-769159-m02" exists ...
	I0704 02:09:53.420410 1327535 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0704 02:09:53.420455 1327535 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-769159-m02
	I0704 02:09:53.436655 1327535 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34086 SSHKeyPath:/home/jenkins/minikube-integration/18859-1190282/.minikube/machines/multinode-769159-m02/id_rsa Username:docker}
	I0704 02:09:53.532497 1327535 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0704 02:09:53.544112 1327535 status.go:257] multinode-769159-m02 status: &{Name:multinode-769159-m02 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0704 02:09:53.544146 1327535 status.go:255] checking status of multinode-769159-m03 ...
	I0704 02:09:53.544484 1327535 cli_runner.go:164] Run: docker container inspect multinode-769159-m03 --format={{.State.Status}}
	I0704 02:09:53.560712 1327535 status.go:330] multinode-769159-m03 host status = "Stopped" (err=<nil>)
	I0704 02:09:53.560737 1327535 status.go:343] host is not running, skipping remaining checks
	I0704 02:09:53.560744 1327535 status.go:257] multinode-769159-m03 status: &{Name:multinode-769159-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopNode (2.23s)
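Both status invocations above print a normal report but exit with status 7, so a stopped host is signalled through the exit code rather than through a command failure. A sketch of how a caller could read that code, using the binary path and profile from this report:

	package main

	import (
		"errors"
		"fmt"
		"os/exec"
	)

	func main() {
		cmd := exec.Command("out/minikube-linux-arm64", "-p", "multinode-769159", "status")
		out, err := cmd.Output()
		fmt.Print(string(out))

		var ee *exec.ExitError
		if errors.As(err, &ee) {
			// In the runs above, exit status 7 accompanied a stopped host,
			// not a failure of the status command itself.
			fmt.Println("status exit code:", ee.ExitCode())
		} else if err != nil {
			fmt.Println("could not run status:", err)
		}
	}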
TestMultiNode/serial/StartAfterStop (10.73s)
=== RUN   TestMultiNode/serial/StartAfterStop
multinode_test.go:282: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 node start m03 -v=7 --alsologtostderr
multinode_test.go:282: (dbg) Done: out/minikube-linux-arm64 -p multinode-769159 node start m03 -v=7 --alsologtostderr: (9.960069162s)
multinode_test.go:290: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status -v=7 --alsologtostderr
multinode_test.go:306: (dbg) Run:  kubectl get nodes
--- PASS: TestMultiNode/serial/StartAfterStop (10.73s)

TestMultiNode/serial/RestartKeepsNodes (81.08s)
=== RUN   TestMultiNode/serial/RestartKeepsNodes
multinode_test.go:314: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-769159
multinode_test.go:321: (dbg) Run:  out/minikube-linux-arm64 stop -p multinode-769159
multinode_test.go:321: (dbg) Done: out/minikube-linux-arm64 stop -p multinode-769159: (24.980138891s)
multinode_test.go:326: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-769159 --wait=true -v=8 --alsologtostderr
E0704 02:10:53.143712 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
multinode_test.go:326: (dbg) Done: out/minikube-linux-arm64 start -p multinode-769159 --wait=true -v=8 --alsologtostderr: (55.985674928s)
multinode_test.go:331: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-769159
--- PASS: TestMultiNode/serial/RestartKeepsNodes (81.08s)

TestMultiNode/serial/DeleteNode (5.49s)
=== RUN   TestMultiNode/serial/DeleteNode
multinode_test.go:416: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 node delete m03
multinode_test.go:416: (dbg) Done: out/minikube-linux-arm64 -p multinode-769159 node delete m03: (4.825578549s)
multinode_test.go:422: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
multinode_test.go:436: (dbg) Run:  kubectl get nodes
multinode_test.go:444: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/DeleteNode (5.49s)
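The last assertion above renders each node's Ready condition through kubectl's go-template output. The template logic can be reproduced with Go's text/template against a trimmed-down node list; the field names are capitalized here because text/template on Go structs requires exported fields, while kubectl applies the lowercase form to the JSON representation:

	package main

	import (
		"os"
		"text/template"
	)

	// Minimal stand-ins for the node list kubectl serves; local types for
	// illustration only.
	type condition struct{ Type, Status string }
	type node struct {
		Status struct{ Conditions []condition }
	}
	type nodeList struct{ Items []node }

	// Same template logic as the kubectl invocation above.
	const tmpl = `{{range .Items}}{{range .Status.Conditions}}` +
		`{{if eq .Type "Ready"}} {{.Status}}{{"\n"}}{{end}}{{end}}{{end}}`

	func main() {
		list := nodeList{Items: make([]node, 2)} // two nodes remain after the delete
		for i := range list.Items {
			list.Items[i].Status.Conditions = []condition{{Type: "Ready", Status: "True"}}
		}
		// Prints one " True" line per Ready node, mirroring the kubectl check.
		template.Must(template.New("ready").Parse(tmpl)).Execute(os.Stdout, list)
	}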
TestMultiNode/serial/StopMultiNode (24s)
=== RUN   TestMultiNode/serial/StopMultiNode
multinode_test.go:345: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 stop
E0704 02:11:51.649207 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
multinode_test.go:345: (dbg) Done: out/minikube-linux-arm64 -p multinode-769159 stop: (23.820203754s)
multinode_test.go:351: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status
multinode_test.go:351: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-769159 status: exit status 7 (92.886006ms)
-- stdout --
	multinode-769159
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-769159-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
multinode_test.go:358: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
multinode_test.go:358: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr: exit status 7 (86.616703ms)
-- stdout --
	multinode-769159
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-769159-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
** stderr ** 
	I0704 02:11:54.821102 1335510 out.go:291] Setting OutFile to fd 1 ...
	I0704 02:11:54.821276 1335510 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:11:54.821287 1335510 out.go:304] Setting ErrFile to fd 2...
	I0704 02:11:54.821294 1335510 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:11:54.821581 1335510 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 02:11:54.821791 1335510 out.go:298] Setting JSON to false
	I0704 02:11:54.821830 1335510 mustload.go:65] Loading cluster: multinode-769159
	I0704 02:11:54.821939 1335510 notify.go:220] Checking for updates...
	I0704 02:11:54.822296 1335510 config.go:182] Loaded profile config "multinode-769159": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:11:54.822314 1335510 status.go:255] checking status of multinode-769159 ...
	I0704 02:11:54.823133 1335510 cli_runner.go:164] Run: docker container inspect multinode-769159 --format={{.State.Status}}
	I0704 02:11:54.841467 1335510 status.go:330] multinode-769159 host status = "Stopped" (err=<nil>)
	I0704 02:11:54.841493 1335510 status.go:343] host is not running, skipping remaining checks
	I0704 02:11:54.841501 1335510 status.go:257] multinode-769159 status: &{Name:multinode-769159 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0704 02:11:54.841533 1335510 status.go:255] checking status of multinode-769159-m02 ...
	I0704 02:11:54.841871 1335510 cli_runner.go:164] Run: docker container inspect multinode-769159-m02 --format={{.State.Status}}
	I0704 02:11:54.861695 1335510 status.go:330] multinode-769159-m02 host status = "Stopped" (err=<nil>)
	I0704 02:11:54.861719 1335510 status.go:343] host is not running, skipping remaining checks
	I0704 02:11:54.861728 1335510 status.go:257] multinode-769159-m02 status: &{Name:multinode-769159-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopMultiNode (24.00s)

TestMultiNode/serial/RestartMultiNode (49.59s)
=== RUN   TestMultiNode/serial/RestartMultiNode
multinode_test.go:376: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-769159 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:376: (dbg) Done: out/minikube-linux-arm64 start -p multinode-769159 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (48.915788708s)
multinode_test.go:382: (dbg) Run:  out/minikube-linux-arm64 -p multinode-769159 status --alsologtostderr
multinode_test.go:396: (dbg) Run:  kubectl get nodes
multinode_test.go:404: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/RestartMultiNode (49.59s)

TestMultiNode/serial/ValidateNameConflict (31.8s)
=== RUN   TestMultiNode/serial/ValidateNameConflict
multinode_test.go:455: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-769159
multinode_test.go:464: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-769159-m02 --driver=docker  --container-runtime=containerd
multinode_test.go:464: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p multinode-769159-m02 --driver=docker  --container-runtime=containerd: exit status 14 (72.452831ms)
-- stdout --
	* [multinode-769159-m02] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	
-- /stdout --
** stderr ** 
	! Profile name 'multinode-769159-m02' is duplicated with machine name 'multinode-769159-m02' in profile 'multinode-769159'
	X Exiting due to MK_USAGE: Profile name should be unique

** /stderr **
multinode_test.go:472: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-769159-m03 --driver=docker  --container-runtime=containerd
multinode_test.go:472: (dbg) Done: out/minikube-linux-arm64 start -p multinode-769159-m03 --driver=docker  --container-runtime=containerd: (29.417162749s)
multinode_test.go:479: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-769159
multinode_test.go:479: (dbg) Non-zero exit: out/minikube-linux-arm64 node add -p multinode-769159: exit status 80 (313.608583ms)
-- stdout --
	* Adding node m03 to cluster multinode-769159 as [worker]
	
	
-- /stdout --
** stderr ** 
	X Exiting due to GUEST_NODE_ADD: failed to add node: Node multinode-769159-m03 already exists in multinode-769159-m03 profile
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_node_040ea7097fd6ed71e65be9a474587f81f0ccd21d_1.log                    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

** /stderr **
multinode_test.go:484: (dbg) Run:  out/minikube-linux-arm64 delete -p multinode-769159-m03
E0704 02:13:14.693858 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
multinode_test.go:484: (dbg) Done: out/minikube-linux-arm64 delete -p multinode-769159-m03: (1.943568237s)
--- PASS: TestMultiNode/serial/ValidateNameConflict (31.80s)
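The two rejections above come from the same guard: a new profile name may not collide with an existing profile, nor with a machine name inside one (here, multinode-769159-m02 belongs to profile multinode-769159). A simplified illustration of that rule, not minikube's implementation; the names are taken from this run:

	package main

	import "fmt"

	// validateProfileName sketches the uniqueness rule the test exercises;
	// profiles maps a profile name to the machine names it owns.
	func validateProfileName(name string, profiles map[string][]string) error {
		for profile, machines := range profiles {
			if name == profile {
				return fmt.Errorf("profile name %q already exists", name)
			}
			for _, m := range machines {
				if name == m {
					return fmt.Errorf("profile name %q is duplicated with machine name %q in profile %q", name, m, profile)
				}
			}
		}
		return nil
	}

	func main() {
		existing := map[string][]string{
			"multinode-769159": {"multinode-769159", "multinode-769159-m02"},
		}
		fmt.Println(validateProfileName("multinode-769159-m02", existing)) // rejected, as above
		fmt.Println(validateProfileName("multinode-769159-m03", existing)) // nil: unique at this point
	}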
TestPreload (109.39s)
=== RUN   TestPreload
preload_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-225154 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4
preload_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p test-preload-225154 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4: (1m11.484857819s)
preload_test.go:52: (dbg) Run:  out/minikube-linux-arm64 -p test-preload-225154 image pull gcr.io/k8s-minikube/busybox
preload_test.go:52: (dbg) Done: out/minikube-linux-arm64 -p test-preload-225154 image pull gcr.io/k8s-minikube/busybox: (1.262949658s)
preload_test.go:58: (dbg) Run:  out/minikube-linux-arm64 stop -p test-preload-225154
preload_test.go:58: (dbg) Done: out/minikube-linux-arm64 stop -p test-preload-225154: (12.089767696s)
preload_test.go:66: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-225154 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=docker  --container-runtime=containerd
preload_test.go:66: (dbg) Done: out/minikube-linux-arm64 start -p test-preload-225154 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=docker  --container-runtime=containerd: (21.783300279s)
preload_test.go:71: (dbg) Run:  out/minikube-linux-arm64 -p test-preload-225154 image list
helpers_test.go:175: Cleaning up "test-preload-225154" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p test-preload-225154
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p test-preload-225154: (2.497776691s)
--- PASS: TestPreload (109.39s)

TestScheduledStopUnix (108.03s)
=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-linux-arm64 start -p scheduled-stop-303458 --memory=2048 --driver=docker  --container-runtime=containerd
scheduled_stop_test.go:128: (dbg) Done: out/minikube-linux-arm64 start -p scheduled-stop-303458 --memory=2048 --driver=docker  --container-runtime=containerd: (31.651276426s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-303458 --schedule 5m
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-linux-arm64 status --format={{.TimeToStop}} -p scheduled-stop-303458 -n scheduled-stop-303458
scheduled_stop_test.go:169: signal error was:  <nil>
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-303458 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-303458 --cancel-scheduled
E0704 02:15:53.144472 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-303458 -n scheduled-stop-303458
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-303458
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-303458 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
E0704 02:16:51.651022 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-303458
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p scheduled-stop-303458: exit status 7 (65.48474ms)
-- stdout --
	scheduled-stop-303458
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-303458 -n scheduled-stop-303458
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-303458 -n scheduled-stop-303458: exit status 7 (66.712055ms)
-- stdout --
	Stopped
-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:175: Cleaning up "scheduled-stop-303458" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p scheduled-stop-303458
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p scheduled-stop-303458: (4.900367325s)
--- PASS: TestScheduledStopUnix (108.03s)

TestInsufficientStorage (11.32s)
=== RUN   TestInsufficientStorage
status_test.go:50: (dbg) Run:  out/minikube-linux-arm64 start -p insufficient-storage-309578 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd
status_test.go:50: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p insufficient-storage-309578 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd: exit status 26 (8.778470225s)
-- stdout --
	{"specversion":"1.0","id":"20149943-0aee-49dd-aa3f-87b02b4d10cd","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[insufficient-storage-309578] minikube v1.33.1 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"0260d321-17e8-4f21-b673-de3f594a896d","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=18859"}}
	{"specversion":"1.0","id":"b3a241c7-584f-46b8-b5d7-1bb1c3248c9a","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"fb06499c-c701-4d1f-a856-a049267e2ae8","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig"}}
	{"specversion":"1.0","id":"e86200fb-bd80-4755-aa4b-c72af33f365c","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube"}}
	{"specversion":"1.0","id":"3e521028-f87e-4a26-b3bf-fe93fb125e17","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"74b47d27-07c2-4681-b811-cc5c3ac4589f","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"733dbe6f-3d68-4460-b384-32dfa987c963","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_STORAGE_CAPACITY=100"}}
	{"specversion":"1.0","id":"16675a6e-77a6-4455-b3a2-70738303b931","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_AVAILABLE_STORAGE=19"}}
	{"specversion":"1.0","id":"aee28fc3-9f08-4f5e-9521-c36f13cd41d8","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"1","message":"Using the docker driver based on user configuration","name":"Selecting Driver","totalsteps":"19"}}
	{"specversion":"1.0","id":"e31132b2-9cba-4089-b94f-5b31c4fd7208","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"Using Docker driver with root privileges"}}
	{"specversion":"1.0","id":"5be44ea4-dffe-4943-921e-07334152bc2e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"3","message":"Starting \"insufficient-storage-309578\" primary control-plane node in \"insufficient-storage-309578\" cluster","name":"Starting Node","totalsteps":"19"}}
	{"specversion":"1.0","id":"3f6cf414-6272-49e7-b64d-369620604de0","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"5","message":"Pulling base image v0.0.44-1719972989-19184 ...","name":"Pulling Base Image","totalsteps":"19"}}
	{"specversion":"1.0","id":"f6dd361e-9f56-4aa1-acd9-37e861a3747a","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"8","message":"Creating docker container (CPUs=2, Memory=2048MB) ...","name":"Creating Container","totalsteps":"19"}}
	{"specversion":"1.0","id":"c03a2c21-61bc-4882-84d8-e5df800fe89e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"Try one or more of the following to free up space on the device:\n\t\n\t\t\t1. Run \"docker system prune\" to remove unused Docker data (optionally with \"-a\")\n\t\t\t2. Increase the storage allocated to Docker for Desktop by clicking on:\n\t\t\t\tDocker icon \u003e Preferences \u003e Resources \u003e Disk Image Size\n\t\t\t3. Run \"minikube ssh -- docker system prune\" if using the Docker container runtime","exitcode":"26","issues":"https://github.com/kubernetes/minikube/issues/9024","message":"Docker is out of disk space! (/var is at 100%% of capacity). You can pass '--force' to skip this check.","name":"RSRC_DOCKER_STORAGE","url":""}}

                                                
                                                
-- /stdout --
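With --output=json, minikube prints one CloudEvents-style JSON object per line, and the storage failure above arrives as a final "io.k8s.sigs.minikube.error" event carrying the RSRC_DOCKER_STORAGE name and exit code 26. A minimal Go sketch for consuming such a stream (field names are copied from the log above; the program itself is illustrative, not minikube code):

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

// minikubeEvent mirrors the CloudEvents attributes visible in the log lines.
type minikubeEvent struct {
	SpecVersion string            `json:"specversion"`
	ID          string            `json:"id"`
	Source      string            `json:"source"`
	Type        string            `json:"type"`
	Data        map[string]string `json:"data"`
}

func main() {
	// e.g. minikube start --output=json ... | thisprogram
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 64*1024), 1024*1024) // event lines can be long
	for sc.Scan() {
		var ev minikubeEvent
		if err := json.Unmarshal(sc.Bytes(), &ev); err != nil {
			continue // skip anything that is not a JSON event
		}
		switch ev.Type {
		case "io.k8s.sigs.minikube.step":
			fmt.Printf("step %s/%s: %s\n", ev.Data["currentstep"], ev.Data["totalsteps"], ev.Data["message"])
		case "io.k8s.sigs.minikube.error":
			fmt.Printf("%s (exit code %s): %s\n", ev.Data["name"], ev.Data["exitcode"], ev.Data["message"])
		}
	}
}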
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-309578 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-309578 --output=json --layout=cluster: exit status 7 (307.809902ms)

                                                
                                                
-- stdout --
	{"Name":"insufficient-storage-309578","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","Step":"Creating Container","StepDetail":"Creating docker container (CPUs=2, Memory=2048MB) ...","BinaryVersion":"v1.33.1","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-309578","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
** stderr ** 
	E0704 02:17:06.692635 1354021 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-309578" does not appear in /home/jenkins/minikube-integration/18859-1190282/kubeconfig

                                                
                                                
** /stderr **
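The cluster-layout status above is a single JSON document whose codes reuse HTTP semantics (507 InsufficientStorage, 500 Error, 405 Stopped). A hedged sketch of Go types that decode the fields printed here; the shapes are inferred from this output, not taken from minikube's source:

package main

import (
	"encoding/json"
	"fmt"
)

// componentStatus, nodeStatus and clusterStatus mirror the fields printed in
// the status output above; they are inferred from it, not minikube's own types.
type componentStatus struct {
	Name       string `json:"Name"`
	StatusCode int    `json:"StatusCode"`
	StatusName string `json:"StatusName"`
}

type nodeStatus struct {
	Name       string                     `json:"Name"`
	StatusCode int                        `json:"StatusCode"`
	StatusName string                     `json:"StatusName"`
	Components map[string]componentStatus `json:"Components"`
}

type clusterStatus struct {
	Name          string                     `json:"Name"`
	StatusCode    int                        `json:"StatusCode"`
	StatusName    string                     `json:"StatusName"`
	StatusDetail  string                     `json:"StatusDetail"`
	Step          string                     `json:"Step"`
	StepDetail    string                     `json:"StepDetail"`
	BinaryVersion string                     `json:"BinaryVersion"`
	Components    map[string]componentStatus `json:"Components"`
	Nodes         []nodeStatus               `json:"Nodes"`
}

func main() {
	raw := `{"Name":"insufficient-storage-309578","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","Nodes":[]}`
	var st clusterStatus
	if err := json.Unmarshal([]byte(raw), &st); err != nil {
		panic(err)
	}
	fmt.Println(st.StatusName, st.StatusCode) // InsufficientStorage 507
}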
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-309578 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-309578 --output=json --layout=cluster: exit status 7 (325.400114ms)

                                                
                                                
-- stdout --
	{"Name":"insufficient-storage-309578","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","BinaryVersion":"v1.33.1","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-309578","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
** stderr ** 
	E0704 02:17:07.015124 1354082 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-309578" does not appear in /home/jenkins/minikube-integration/18859-1190282/kubeconfig
	E0704 02:17:07.026871 1354082 status.go:560] unable to read event log: stat: stat /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/insufficient-storage-309578/events.json: no such file or directory

                                                
                                                
** /stderr **
helpers_test.go:175: Cleaning up "insufficient-storage-309578" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p insufficient-storage-309578
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p insufficient-storage-309578: (1.903444925s)
--- PASS: TestInsufficientStorage (11.32s)

TestRunningBinaryUpgrade (93.29s)

=== RUN   TestRunningBinaryUpgrade
=== PAUSE TestRunningBinaryUpgrade

=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:120: (dbg) Run:  /tmp/minikube-v1.26.0.940731877 start -p running-upgrade-123767 --memory=2200 --vm-driver=docker  --container-runtime=containerd
E0704 02:25:53.144423 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
version_upgrade_test.go:120: (dbg) Done: /tmp/minikube-v1.26.0.940731877 start -p running-upgrade-123767 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (48.88770944s)
version_upgrade_test.go:130: (dbg) Run:  out/minikube-linux-arm64 start -p running-upgrade-123767 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0704 02:26:51.649129 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
version_upgrade_test.go:130: (dbg) Done: out/minikube-linux-arm64 start -p running-upgrade-123767 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (40.618033002s)
helpers_test.go:175: Cleaning up "running-upgrade-123767" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p running-upgrade-123767
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p running-upgrade-123767: (3.108746607s)
--- PASS: TestRunningBinaryUpgrade (93.29s)

TestKubernetesUpgrade (341.81s)

=== RUN   TestKubernetesUpgrade
=== PAUSE TestKubernetesUpgrade

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:222: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0704 02:20:53.144150 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
version_upgrade_test.go:222: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (55.464455506s)
version_upgrade_test.go:227: (dbg) Run:  out/minikube-linux-arm64 stop -p kubernetes-upgrade-622723
version_upgrade_test.go:227: (dbg) Done: out/minikube-linux-arm64 stop -p kubernetes-upgrade-622723: (1.245153296s)
version_upgrade_test.go:232: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-622723 status --format={{.Host}}
version_upgrade_test.go:232: (dbg) Non-zero exit: out/minikube-linux-arm64 -p kubernetes-upgrade-622723 status --format={{.Host}}: exit status 7 (64.3132ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
version_upgrade_test.go:234: status error: exit status 7 (may be ok)
version_upgrade_test.go:243: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.30.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0704 02:21:51.649253 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
version_upgrade_test.go:243: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.30.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (4m37.388719473s)
version_upgrade_test.go:248: (dbg) Run:  kubectl --context kubernetes-upgrade-622723 version --output=json
version_upgrade_test.go:267: Attempting to downgrade Kubernetes (should fail)
version_upgrade_test.go:269: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.20.0 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:269: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.20.0 --driver=docker  --container-runtime=containerd: exit status 106 (83.657306ms)

                                                
                                                
-- stdout --
	* [kubernetes-upgrade-622723] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to K8S_DOWNGRADE_UNSUPPORTED: Unable to safely downgrade existing Kubernetes v1.30.2 cluster to v1.20.0
	* Suggestion: 
	
	    1) Recreate the cluster with Kubernetes 1.20.0, by running:
	    
	    minikube delete -p kubernetes-upgrade-622723
	    minikube start -p kubernetes-upgrade-622723 --kubernetes-version=v1.20.0
	    
	    2) Create a second cluster with Kubernetes 1.20.0, by running:
	    
	    minikube start -p kubernetes-upgrade-6227232 --kubernetes-version=v1.20.0
	    
	    3) Use the existing cluster at version Kubernetes 1.30.2, by running:
	    
	    minikube start -p kubernetes-upgrade-622723 --kubernetes-version=v1.30.2
	    

                                                
                                                
** /stderr **
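The refusal is communicated entirely through exit code 106, paired with the K8S_DOWNGRADE_UNSUPPORTED reason above, while the cluster stays at v1.30.2 (the quick restart that follows confirms it). A sketch of detecting that outcome from a caller, assuming only what this log shows:

package main

import (
	"errors"
	"log"
	"os/exec"
)

func main() {
	cmd := exec.Command("out/minikube-linux-arm64", "start", "-p", "kubernetes-upgrade-622723",
		"--memory=2200", "--kubernetes-version=v1.20.0",
		"--driver=docker", "--container-runtime=containerd")
	err := cmd.Run()
	var ee *exec.ExitError
	if errors.As(err, &ee) && ee.ExitCode() == 106 {
		// Downgrade refused; either delete and recreate the profile at the
		// older version, or keep using the existing v1.30.2 cluster.
		log.Println("downgrade unsupported, as expected")
	}
}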
version_upgrade_test.go:273: Attempting restart after unsuccessful downgrade
version_upgrade_test.go:275: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.30.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:275: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-622723 --memory=2200 --kubernetes-version=v1.30.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (5.316101548s)
helpers_test.go:175: Cleaning up "kubernetes-upgrade-622723" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p kubernetes-upgrade-622723
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p kubernetes-upgrade-622723: (2.140486519s)
--- PASS: TestKubernetesUpgrade (341.81s)

TestMissingContainerUpgrade (156.88s)

=== RUN   TestMissingContainerUpgrade
=== PAUSE TestMissingContainerUpgrade

=== CONT  TestMissingContainerUpgrade
version_upgrade_test.go:309: (dbg) Run:  /tmp/minikube-v1.26.0.3996505368 start -p missing-upgrade-472944 --memory=2200 --driver=docker  --container-runtime=containerd
E0704 02:23:56.192320 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
version_upgrade_test.go:309: (dbg) Done: /tmp/minikube-v1.26.0.3996505368 start -p missing-upgrade-472944 --memory=2200 --driver=docker  --container-runtime=containerd: (1m18.544640701s)
version_upgrade_test.go:318: (dbg) Run:  docker stop missing-upgrade-472944
version_upgrade_test.go:318: (dbg) Done: docker stop missing-upgrade-472944: (10.313738053s)
version_upgrade_test.go:323: (dbg) Run:  docker rm missing-upgrade-472944
version_upgrade_test.go:329: (dbg) Run:  out/minikube-linux-arm64 start -p missing-upgrade-472944 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:329: (dbg) Done: out/minikube-linux-arm64 start -p missing-upgrade-472944 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (1m4.379551977s)
helpers_test.go:175: Cleaning up "missing-upgrade-472944" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p missing-upgrade-472944
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p missing-upgrade-472944: (2.511755576s)
--- PASS: TestMissingContainerUpgrade (156.88s)

TestPause/serial/Start (99.16s)

=== RUN   TestPause/serial/Start
pause_test.go:80: (dbg) Run:  out/minikube-linux-arm64 start -p pause-736265 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd
pause_test.go:80: (dbg) Done: out/minikube-linux-arm64 start -p pause-736265 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd: (1m39.15775518s)
--- PASS: TestPause/serial/Start (99.16s)

TestNoKubernetes/serial/StartNoK8sWithVersion (0.09s)

=== RUN   TestNoKubernetes/serial/StartNoK8sWithVersion
no_kubernetes_test.go:83: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:83: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd: exit status 14 (87.729361ms)

                                                
                                                
-- stdout --
	* [NoKubernetes-930680] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to MK_USAGE: cannot specify --kubernetes-version with --no-kubernetes,
	to unset a global config run:
	
	$ minikube config unset kubernetes-version

                                                
                                                
** /stderr **
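The exit status 14 here comes from a plain flag-conflict rule: --no-kubernetes cannot be combined with an explicit --kubernetes-version. A hypothetical sketch of that validation (function name and signature invented for illustration):

package main

import (
	"fmt"
	"log"
)

// validateNoKubernetesFlags is a hypothetical stand-in for the usage check
// implied by the MK_USAGE error above: the two flags are mutually exclusive.
func validateNoKubernetesFlags(noKubernetes bool, kubernetesVersion string) error {
	if noKubernetes && kubernetesVersion != "" {
		return fmt.Errorf("cannot specify --kubernetes-version with --no-kubernetes")
	}
	return nil
}

func main() {
	if err := validateNoKubernetesFlags(true, "1.20"); err != nil {
		log.Println(err) // mirrors the exit status 14 path above
	}
}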
--- PASS: TestNoKubernetes/serial/StartNoK8sWithVersion (0.09s)

TestNoKubernetes/serial/StartWithK8s (41.73s)

=== RUN   TestNoKubernetes/serial/StartWithK8s
no_kubernetes_test.go:95: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-930680 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:95: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-930680 --driver=docker  --container-runtime=containerd: (41.36099668s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-930680 status -o json
--- PASS: TestNoKubernetes/serial/StartWithK8s (41.73s)

TestNoKubernetes/serial/StartWithStopK8s (16.29s)

=== RUN   TestNoKubernetes/serial/StartWithStopK8s
no_kubernetes_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --driver=docker  --container-runtime=containerd: (14.029918352s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-930680 status -o json
no_kubernetes_test.go:200: (dbg) Non-zero exit: out/minikube-linux-arm64 -p NoKubernetes-930680 status -o json: exit status 2 (298.548015ms)

                                                
                                                
-- stdout --
	{"Name":"NoKubernetes-930680","Host":"Running","Kubelet":"Stopped","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}

                                                
                                                
-- /stdout --
no_kubernetes_test.go:124: (dbg) Run:  out/minikube-linux-arm64 delete -p NoKubernetes-930680
no_kubernetes_test.go:124: (dbg) Done: out/minikube-linux-arm64 delete -p NoKubernetes-930680: (1.958142824s)
--- PASS: TestNoKubernetes/serial/StartWithStopK8s (16.29s)

TestNoKubernetes/serial/Start (7.77s)

=== RUN   TestNoKubernetes/serial/Start
no_kubernetes_test.go:136: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:136: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-930680 --no-kubernetes --driver=docker  --container-runtime=containerd: (7.766182089s)
--- PASS: TestNoKubernetes/serial/Start (7.77s)

TestNoKubernetes/serial/VerifyK8sNotRunning (0.3s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunning
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-930680 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-930680 "sudo systemctl is-active --quiet service kubelet": exit status 1 (301.266684ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
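This assertion is inverted: the test passes because the command fails. With --quiet, systemctl is-active prints nothing and exits 0 only for an active unit; the "Process exited with status 3" above is systemctl's conventional code for an inactive unit, which minikube ssh surfaces as exit status 1. A sketch of the same check:

package main

import (
	"log"
	"os/exec"
)

func main() {
	// Expect failure: kubelet must not be running in a --no-kubernetes profile.
	cmd := exec.Command("out/minikube-linux-arm64", "ssh", "-p", "NoKubernetes-930680",
		"sudo systemctl is-active --quiet service kubelet")
	if err := cmd.Run(); err == nil {
		log.Fatal("kubelet is unexpectedly active")
	}
	log.Println("kubelet inactive, as expected")
}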
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunning (0.30s)

TestNoKubernetes/serial/ProfileList (1.09s)

=== RUN   TestNoKubernetes/serial/ProfileList
no_kubernetes_test.go:169: (dbg) Run:  out/minikube-linux-arm64 profile list
no_kubernetes_test.go:179: (dbg) Run:  out/minikube-linux-arm64 profile list --output=json
--- PASS: TestNoKubernetes/serial/ProfileList (1.09s)

TestNoKubernetes/serial/Stop (1.2s)

=== RUN   TestNoKubernetes/serial/Stop
no_kubernetes_test.go:158: (dbg) Run:  out/minikube-linux-arm64 stop -p NoKubernetes-930680
no_kubernetes_test.go:158: (dbg) Done: out/minikube-linux-arm64 stop -p NoKubernetes-930680: (1.199313299s)
--- PASS: TestNoKubernetes/serial/Stop (1.20s)

TestNoKubernetes/serial/StartNoArgs (6.95s)

=== RUN   TestNoKubernetes/serial/StartNoArgs
no_kubernetes_test.go:191: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-930680 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:191: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-930680 --driver=docker  --container-runtime=containerd: (6.949709607s)
--- PASS: TestNoKubernetes/serial/StartNoArgs (6.95s)

TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.26s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunningSecond
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-930680 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-930680 "sudo systemctl is-active --quiet service kubelet": exit status 1 (261.780698ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.26s)

TestNetworkPlugins/group/false (3.62s)

=== RUN   TestNetworkPlugins/group/false
net_test.go:246: (dbg) Run:  out/minikube-linux-arm64 start -p false-104147 --memory=2048 --alsologtostderr --cni=false --driver=docker  --container-runtime=containerd
net_test.go:246: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p false-104147 --memory=2048 --alsologtostderr --cni=false --driver=docker  --container-runtime=containerd: exit status 14 (170.208206ms)

                                                
                                                
-- stdout --
	* [false-104147] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=18859
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on user configuration
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0704 02:18:29.738828 1363966 out.go:291] Setting OutFile to fd 1 ...
	I0704 02:18:29.739030 1363966 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:18:29.739086 1363966 out.go:304] Setting ErrFile to fd 2...
	I0704 02:18:29.739106 1363966 out.go:338] TERM=,COLORTERM=, which probably does not support color
	I0704 02:18:29.739401 1363966 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18859-1190282/.minikube/bin
	I0704 02:18:29.739913 1363966 out.go:298] Setting JSON to false
	I0704 02:18:29.741008 1363966 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":28860,"bootTime":1720030650,"procs":215,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1064-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0704 02:18:29.741109 1363966 start.go:139] virtualization:  
	I0704 02:18:29.743810 1363966 out.go:177] * [false-104147] minikube v1.33.1 on Ubuntu 20.04 (arm64)
	I0704 02:18:29.746055 1363966 out.go:177]   - MINIKUBE_LOCATION=18859
	I0704 02:18:29.746119 1363966 notify.go:220] Checking for updates...
	I0704 02:18:29.749458 1363966 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0704 02:18:29.751310 1363966 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/18859-1190282/kubeconfig
	I0704 02:18:29.753317 1363966 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/18859-1190282/.minikube
	I0704 02:18:29.755028 1363966 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0704 02:18:29.756593 1363966 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0704 02:18:29.758990 1363966 config.go:182] Loaded profile config "pause-736265": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.30.2
	I0704 02:18:29.759153 1363966 driver.go:392] Setting default libvirt URI to qemu:///system
	I0704 02:18:29.791453 1363966 docker.go:122] docker version: linux-27.0.3:Docker Engine - Community
	I0704 02:18:29.791644 1363966 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0704 02:18:29.846066 1363966 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:33 OomKillDisable:true NGoroutines:51 SystemTime:2024-07-04 02:18:29.836217477 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1064-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214896640 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:27.0.3 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e Expected:ae71819c4f5e67bb4d5ae76a6b735f29cc25774e} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.15.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.28.1]] Warnings:<nil>}}
	I0704 02:18:29.846180 1363966 docker.go:295] overlay module found
	I0704 02:18:29.848385 1363966 out.go:177] * Using the docker driver based on user configuration
	I0704 02:18:29.850104 1363966 start.go:297] selected driver: docker
	I0704 02:18:29.850126 1363966 start.go:901] validating driver "docker" against <nil>
	I0704 02:18:29.850141 1363966 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0704 02:18:29.852873 1363966 out.go:177] 
	W0704 02:18:29.855023 1363966 out.go:239] X Exiting due to MK_USAGE: The "containerd" container runtime requires CNI
	X Exiting due to MK_USAGE: The "containerd" container runtime requires CNI
	I0704 02:18:29.856936 1363966 out.go:177] 

                                                
                                                
** /stderr **
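Exit status 14 again marks a usage error: --cni=false is rejected outright because the containerd runtime needs a CNI plugin to start pods. A hypothetical sketch of that rule (names invented for illustration):

package main

import (
	"fmt"
	"log"
)

// validateCNI is a hypothetical stand-in for the check behind the MK_USAGE
// error above: non-Docker runtimes such as containerd require a CNI.
func validateCNI(containerRuntime, cni string) error {
	if containerRuntime == "containerd" && cni == "false" {
		return fmt.Errorf("the %q container runtime requires CNI", containerRuntime)
	}
	return nil
}

func main() {
	if err := validateCNI("containerd", "false"); err != nil {
		log.Println(err) // the exit status 14 path above
	}
}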
net_test.go:88: 
----------------------- debugLogs start: false-104147 [pass: true] --------------------------------
>>> netcat: nslookup kubernetes.default:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: nslookup debug kubernetes.default a-records:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: dig search kubernetes.default:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local udp/53:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local tcp/53:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: nc 10.96.0.10 udp/53:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: nc 10.96.0.10 tcp/53:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: /etc/nsswitch.conf:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: /etc/hosts:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> netcat: /etc/resolv.conf:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> host: /etc/nsswitch.conf:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/hosts:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/resolv.conf:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> k8s: nodes, services, endpoints, daemon sets, deployments and pods, :
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> host: crictl pods:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: crictl containers:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> k8s: describe netcat deployment:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: describe netcat pod(s):
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: netcat logs:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: describe coredns deployment:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: describe coredns pods:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: coredns logs:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: describe api server pod(s):
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: api server logs:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> host: /etc/cni:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: ip a s:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: ip r s:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: iptables-save:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: iptables table nat:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> k8s: describe kube-proxy daemon set:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: describe kube-proxy pod(s):
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> k8s: kube-proxy logs:
error: context "false-104147" does not exist

                                                
                                                

                                                
                                                
>>> host: kubelet daemon status:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: kubelet daemon config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> k8s: kubelet logs:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/kubernetes/kubelet.conf:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /var/lib/kubelet/config.yaml:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> k8s: kubectl config:
apiVersion: v1
clusters:
- cluster:
    certificate-authority: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: cluster_info
    server: https://192.168.76.2:8443
  name: pause-736265
contexts:
- context:
    cluster: pause-736265
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: context_info
    namespace: default
    user: pause-736265
  name: pause-736265
current-context: pause-736265
kind: Config
preferences: {}
users:
- name: pause-736265
  user:
    client-certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.crt
    client-key: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.key

                                                
                                                

                                                
                                                
>>> k8s: cms:
Error in configuration: context was not found for specified context: false-104147

                                                
                                                

                                                
                                                
>>> host: docker daemon status:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: docker daemon config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/docker/daemon.json:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: docker system info:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: cri-docker daemon status:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: cri-docker daemon config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/systemd/system/cri-docker.service.d/10-cni.conf:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /usr/lib/systemd/system/cri-docker.service:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: cri-dockerd version:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: containerd daemon status:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: containerd daemon config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /lib/systemd/system/containerd.service:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/containerd/config.toml:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: containerd config dump:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: crio daemon status:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: crio daemon config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: /etc/crio:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                

                                                
                                                
>>> host: crio config:
* Profile "false-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p false-104147"

                                                
                                                
----------------------- debugLogs end: false-104147 [took: 3.298623956s] --------------------------------
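The kubectl config dump in the debug log above is an ordinary kubeconfig, and it points at the still-running pause-736265 profile rather than the never-created false-104147 one, which is why every kubectl probe reports a missing context. A sketch of reading such a file with client-go's clientcmd package (the path below is this CI host's; substitute your own):

package main

import (
	"fmt"

	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.LoadFromFile("/home/jenkins/minikube-integration/18859-1190282/kubeconfig")
	if err != nil {
		panic(err)
	}
	fmt.Println("current-context:", cfg.CurrentContext) // pause-736265 in the dump above
	for name := range cfg.Contexts {
		fmt.Println("known context:", name)
	}
}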
helpers_test.go:175: Cleaning up "false-104147" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p false-104147
--- PASS: TestNetworkPlugins/group/false (3.62s)

TestPause/serial/SecondStartNoReconfiguration (7.53s)

=== RUN   TestPause/serial/SecondStartNoReconfiguration
pause_test.go:92: (dbg) Run:  out/minikube-linux-arm64 start -p pause-736265 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
pause_test.go:92: (dbg) Done: out/minikube-linux-arm64 start -p pause-736265 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (7.510507145s)
--- PASS: TestPause/serial/SecondStartNoReconfiguration (7.53s)

TestPause/serial/Pause (0.88s)

=== RUN   TestPause/serial/Pause
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-736265 --alsologtostderr -v=5
--- PASS: TestPause/serial/Pause (0.88s)

TestPause/serial/VerifyStatus (0.38s)

=== RUN   TestPause/serial/VerifyStatus
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p pause-736265 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p pause-736265 --output=json --layout=cluster: exit status 2 (383.646333ms)

                                                
                                                
-- stdout --
	{"Name":"pause-736265","StatusCode":418,"StatusName":"Paused","Step":"Done","StepDetail":"* Paused 7 containers in: kube-system, kubernetes-dashboard, storage-gluster, istio-operator","BinaryVersion":"v1.33.1","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":200,"StatusName":"OK"}},"Nodes":[{"Name":"pause-736265","StatusCode":200,"StatusName":"OK","Components":{"apiserver":{"Name":"apiserver","StatusCode":418,"StatusName":"Paused"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
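Status code 418 is the "Paused" marker (mirroring its HTTP namesake), reported here alongside a stopped kubelet (405) and a healthy kubeconfig (200); the clusterStatus types sketched under TestInsufficientStorage decode this output unchanged.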
--- PASS: TestPause/serial/VerifyStatus (0.38s)

TestPause/serial/Unpause (0.93s)

=== RUN   TestPause/serial/Unpause
pause_test.go:121: (dbg) Run:  out/minikube-linux-arm64 unpause -p pause-736265 --alsologtostderr -v=5
--- PASS: TestPause/serial/Unpause (0.93s)

TestPause/serial/PauseAgain (1.08s)

=== RUN   TestPause/serial/PauseAgain
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-736265 --alsologtostderr -v=5
pause_test.go:110: (dbg) Done: out/minikube-linux-arm64 pause -p pause-736265 --alsologtostderr -v=5: (1.082350591s)
--- PASS: TestPause/serial/PauseAgain (1.08s)

TestPause/serial/DeletePaused (3.05s)

=== RUN   TestPause/serial/DeletePaused
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p pause-736265 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p pause-736265 --alsologtostderr -v=5: (3.052464517s)
--- PASS: TestPause/serial/DeletePaused (3.05s)

TestPause/serial/VerifyDeletedResources (6.18s)

=== RUN   TestPause/serial/VerifyDeletedResources
pause_test.go:142: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
pause_test.go:142: (dbg) Done: out/minikube-linux-arm64 profile list --output json: (6.10702206s)
pause_test.go:168: (dbg) Run:  docker ps -a
pause_test.go:173: (dbg) Run:  docker volume inspect pause-736265
pause_test.go:173: (dbg) Non-zero exit: docker volume inspect pause-736265: exit status 1 (23.74709ms)

                                                
                                                
-- stdout --
	[]

                                                
                                                
-- /stdout --
** stderr ** 
	Error response from daemon: get pause-736265: no such volume

                                                
                                                
** /stderr **
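Deletion is verified negatively: the profile's Docker volume must be gone, so docker volume inspect has to fail and report "no such volume". A sketch of the same assertion:

package main

import (
	"log"
	"os/exec"
)

func main() {
	// Expect failure: after `minikube delete -p pause-736265`, no volume remains.
	out, err := exec.Command("docker", "volume", "inspect", "pause-736265").CombinedOutput()
	if err == nil {
		log.Fatalf("volume still exists: %s", out)
	}
	log.Println("volume removed, as expected")
}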
pause_test.go:178: (dbg) Run:  docker network ls
--- PASS: TestPause/serial/VerifyDeletedResources (6.18s)

TestStoppedBinaryUpgrade/Setup (0.7s)

=== RUN   TestStoppedBinaryUpgrade/Setup
--- PASS: TestStoppedBinaryUpgrade/Setup (0.70s)

TestStoppedBinaryUpgrade/Upgrade (121.83s)

=== RUN   TestStoppedBinaryUpgrade/Upgrade
version_upgrade_test.go:183: (dbg) Run:  /tmp/minikube-v1.26.0.1868020015 start -p stopped-upgrade-245843 --memory=2200 --vm-driver=docker  --container-runtime=containerd
version_upgrade_test.go:183: (dbg) Done: /tmp/minikube-v1.26.0.1868020015 start -p stopped-upgrade-245843 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (57.563428723s)
version_upgrade_test.go:192: (dbg) Run:  /tmp/minikube-v1.26.0.1868020015 -p stopped-upgrade-245843 stop
version_upgrade_test.go:192: (dbg) Done: /tmp/minikube-v1.26.0.1868020015 -p stopped-upgrade-245843 stop: (1.346811707s)
version_upgrade_test.go:198: (dbg) Run:  out/minikube-linux-arm64 start -p stopped-upgrade-245843 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:198: (dbg) Done: out/minikube-linux-arm64 start -p stopped-upgrade-245843 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (1m2.921389199s)
--- PASS: TestStoppedBinaryUpgrade/Upgrade (121.83s)
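
Note: this test exercises the stop-then-upgrade path: bring a cluster up with a previous release (a v1.26.0 binary here), stop it, then restart the same profile with the binary under test. A rough sketch of the flow, assuming OLD_BIN points at any previously released minikube binary:
	$OLD_BIN start -p stopped-upgrade-245843 --memory=2200 --vm-driver=docker --container-runtime=containerd
	$OLD_BIN -p stopped-upgrade-245843 stop
	out/minikube-linux-arm64 start -p stopped-upgrade-245843 --memory=2200 --driver=docker --container-runtime=containerd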

                                                
                                    
TestNetworkPlugins/group/auto/Start (92.4s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p auto-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p auto-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --driver=docker  --container-runtime=containerd: (1m32.39778808s)
--- PASS: TestNetworkPlugins/group/auto/Start (92.40s)

                                                
                                    
TestStoppedBinaryUpgrade/MinikubeLogs (1.6s)

                                                
                                                
=== RUN   TestStoppedBinaryUpgrade/MinikubeLogs
version_upgrade_test.go:206: (dbg) Run:  out/minikube-linux-arm64 logs -p stopped-upgrade-245843
version_upgrade_test.go:206: (dbg) Done: out/minikube-linux-arm64 logs -p stopped-upgrade-245843: (1.604678179s)
--- PASS: TestStoppedBinaryUpgrade/MinikubeLogs (1.60s)

                                                
                                    
TestNetworkPlugins/group/kindnet/Start (88.93s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p kindnet-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=kindnet --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p kindnet-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=kindnet --driver=docker  --container-runtime=containerd: (1m28.932192947s)
--- PASS: TestNetworkPlugins/group/kindnet/Start (88.93s)

                                                
                                    
TestNetworkPlugins/group/auto/KubeletFlags (0.29s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p auto-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/auto/KubeletFlags (0.29s)

                                                
                                    
TestNetworkPlugins/group/auto/NetCatPod (8.35s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context auto-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/auto/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-v8h8v" [0346a61e-5c0d-49b3-91bd-b20ce0a6c51f] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-v8h8v" [0346a61e-5c0d-49b3-91bd-b20ce0a6c51f] Running
net_test.go:163: (dbg) TestNetworkPlugins/group/auto/NetCatPod: app=netcat healthy within 8.005307049s
--- PASS: TestNetworkPlugins/group/auto/NetCatPod (8.35s)

                                                
                                    
TestNetworkPlugins/group/auto/DNS (0.23s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/DNS
net_test.go:175: (dbg) Run:  kubectl --context auto-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/auto/DNS (0.23s)

                                                
                                    
TestNetworkPlugins/group/auto/Localhost (0.16s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/Localhost
net_test.go:194: (dbg) Run:  kubectl --context auto-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/auto/Localhost (0.16s)

                                                
                                    
TestNetworkPlugins/group/auto/HairPin (0.16s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/HairPin
net_test.go:264: (dbg) Run:  kubectl --context auto-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/auto/HairPin (0.16s)
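
Note: the DNS/Localhost/HairPin trio above probes three distinct paths from inside the netcat pod: cluster DNS resolution, the pod reaching itself on localhost, and the pod reaching itself back through its own service (hairpin NAT). For reference, the three commands the test runs are:
	kubectl --context auto-104147 exec deployment/netcat -- nslookup kubernetes.default
	kubectl --context auto-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
	kubectl --context auto-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"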

                                                
                                    
TestNetworkPlugins/group/calico/Start (78.93s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p calico-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=calico --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p calico-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=calico --driver=docker  --container-runtime=containerd: (1m18.933826048s)
--- PASS: TestNetworkPlugins/group/calico/Start (78.93s)

                                                
                                    
TestNetworkPlugins/group/kindnet/ControllerPod (6.01s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/ControllerPod
net_test.go:120: (dbg) TestNetworkPlugins/group/kindnet/ControllerPod: waiting 10m0s for pods matching "app=kindnet" in namespace "kube-system" ...
helpers_test.go:344: "kindnet-7p4bt" [f6ca3170-9412-4607-beac-89b55c36bf47] Running
net_test.go:120: (dbg) TestNetworkPlugins/group/kindnet/ControllerPod: app=kindnet healthy within 6.005626957s
--- PASS: TestNetworkPlugins/group/kindnet/ControllerPod (6.01s)
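
Note: ControllerPod waits for the CNI's own workload (here the kindnet pod in kube-system) to be Running before the connectivity subtests begin. An equivalent manual check, assuming the same app=kindnet label the test selects on:
	kubectl --context kindnet-104147 -n kube-system get pods -l app=kindnet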

                                                
                                    
TestNetworkPlugins/group/kindnet/KubeletFlags (0.44s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p kindnet-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/kindnet/KubeletFlags (0.44s)

                                                
                                    
TestNetworkPlugins/group/kindnet/NetCatPod (10.42s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context kindnet-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/kindnet/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-jk7hh" [a593e9ae-2e1d-463d-a6c5-30c3381b29c8] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-jk7hh" [a593e9ae-2e1d-463d-a6c5-30c3381b29c8] Running
E0704 02:29:54.694424 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
net_test.go:163: (dbg) TestNetworkPlugins/group/kindnet/NetCatPod: app=netcat healthy within 10.004318453s
--- PASS: TestNetworkPlugins/group/kindnet/NetCatPod (10.42s)

                                                
                                    
TestNetworkPlugins/group/kindnet/DNS (0.19s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/DNS
net_test.go:175: (dbg) Run:  kubectl --context kindnet-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/kindnet/DNS (0.19s)

                                                
                                    
TestNetworkPlugins/group/kindnet/Localhost (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/Localhost
net_test.go:194: (dbg) Run:  kubectl --context kindnet-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/kindnet/Localhost (0.21s)

                                                
                                    
TestNetworkPlugins/group/kindnet/HairPin (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/HairPin
net_test.go:264: (dbg) Run:  kubectl --context kindnet-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/kindnet/HairPin (0.21s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/Start (63.97s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p custom-flannel-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=testdata/kube-flannel.yaml --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p custom-flannel-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=testdata/kube-flannel.yaml --driver=docker  --container-runtime=containerd: (1m3.973877234s)
--- PASS: TestNetworkPlugins/group/custom-flannel/Start (63.97s)
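
Note: unlike the named plugins above, --cni also accepts a path to an arbitrary CNI manifest; this test feeds minikube its own checked-in kube-flannel.yaml. The same pattern works with any manifest (my-cni.yaml below is a placeholder):
	out/minikube-linux-arm64 start -p custom-flannel-104147 --cni=testdata/kube-flannel.yaml --driver=docker --container-runtime=containerd
	out/minikube-linux-arm64 start -p my-profile --cni=my-cni.yaml --driver=docker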

                                                
                                    
TestNetworkPlugins/group/calico/ControllerPod (6.01s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/ControllerPod
net_test.go:120: (dbg) TestNetworkPlugins/group/calico/ControllerPod: waiting 10m0s for pods matching "k8s-app=calico-node" in namespace "kube-system" ...
helpers_test.go:344: "calico-node-f4vww" [121870e1-ffe2-4b03-84f7-1e2272a09dc8] Running
net_test.go:120: (dbg) TestNetworkPlugins/group/calico/ControllerPod: k8s-app=calico-node healthy within 6.005371769s
--- PASS: TestNetworkPlugins/group/calico/ControllerPod (6.01s)

                                                
                                    
TestNetworkPlugins/group/calico/KubeletFlags (0.44s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p calico-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/calico/KubeletFlags (0.44s)

                                                
                                    
TestNetworkPlugins/group/calico/NetCatPod (11.34s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context calico-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/calico/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-s6cht" [cd9c6498-7555-4254-8459-d3cbca2ba708] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-s6cht" [cd9c6498-7555-4254-8459-d3cbca2ba708] Running
E0704 02:30:53.143751 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
net_test.go:163: (dbg) TestNetworkPlugins/group/calico/NetCatPod: app=netcat healthy within 11.003928627s
--- PASS: TestNetworkPlugins/group/calico/NetCatPod (11.34s)

                                                
                                    
TestNetworkPlugins/group/calico/DNS (0.32s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/DNS
net_test.go:175: (dbg) Run:  kubectl --context calico-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/calico/DNS (0.32s)

                                                
                                    
TestNetworkPlugins/group/calico/Localhost (0.2s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/Localhost
net_test.go:194: (dbg) Run:  kubectl --context calico-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/calico/Localhost (0.20s)

                                                
                                    
TestNetworkPlugins/group/calico/HairPin (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/calico/HairPin
net_test.go:264: (dbg) Run:  kubectl --context calico-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/calico/HairPin (0.21s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/Start (56.32s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p enable-default-cni-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --enable-default-cni=true --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p enable-default-cni-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --enable-default-cni=true --driver=docker  --container-runtime=containerd: (56.315846374s)
--- PASS: TestNetworkPlugins/group/enable-default-cni/Start (56.32s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/KubeletFlags (0.29s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p custom-flannel-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/custom-flannel/KubeletFlags (0.29s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/NetCatPod (10.27s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context custom-flannel-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/custom-flannel/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-5qmn7" [8260b116-ecb0-4c07-a346-ca399020459f] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-5qmn7" [8260b116-ecb0-4c07-a346-ca399020459f] Running
net_test.go:163: (dbg) TestNetworkPlugins/group/custom-flannel/NetCatPod: app=netcat healthy within 10.007332847s
--- PASS: TestNetworkPlugins/group/custom-flannel/NetCatPod (10.27s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/DNS (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/DNS
net_test.go:175: (dbg) Run:  kubectl --context custom-flannel-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/custom-flannel/DNS (0.22s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/Localhost (0.2s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/Localhost
net_test.go:194: (dbg) Run:  kubectl --context custom-flannel-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/custom-flannel/Localhost (0.20s)

                                                
                                    
TestNetworkPlugins/group/custom-flannel/HairPin (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/HairPin
net_test.go:264: (dbg) Run:  kubectl --context custom-flannel-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/custom-flannel/HairPin (0.22s)

                                                
                                    
TestNetworkPlugins/group/flannel/Start (68.37s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p flannel-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=flannel --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p flannel-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=flannel --driver=docker  --container-runtime=containerd: (1m8.374734884s)
--- PASS: TestNetworkPlugins/group/flannel/Start (68.37s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/KubeletFlags (0.37s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p enable-default-cni-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/enable-default-cni/KubeletFlags (0.37s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/NetCatPod (11.35s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context enable-default-cni-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/enable-default-cni/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-bx8vs" [7c909237-747c-4bca-a360-7bcefb857dea] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-bx8vs" [7c909237-747c-4bca-a360-7bcefb857dea] Running
net_test.go:163: (dbg) TestNetworkPlugins/group/enable-default-cni/NetCatPod: app=netcat healthy within 11.00427112s
--- PASS: TestNetworkPlugins/group/enable-default-cni/NetCatPod (11.35s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/DNS (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/DNS
net_test.go:175: (dbg) Run:  kubectl --context enable-default-cni-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/enable-default-cni/DNS (0.22s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/Localhost (0.28s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/Localhost
net_test.go:194: (dbg) Run:  kubectl --context enable-default-cni-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/enable-default-cni/Localhost (0.28s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/HairPin (0.19s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/HairPin
net_test.go:264: (dbg) Run:  kubectl --context enable-default-cni-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/enable-default-cni/HairPin (0.19s)

                                                
                                    
TestNetworkPlugins/group/bridge/Start (49.82s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p bridge-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=bridge --driver=docker  --container-runtime=containerd
net_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p bridge-104147 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=bridge --driver=docker  --container-runtime=containerd: (49.824175995s)
--- PASS: TestNetworkPlugins/group/bridge/Start (49.82s)

                                                
                                    
TestNetworkPlugins/group/flannel/ControllerPod (6.01s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/ControllerPod
net_test.go:120: (dbg) TestNetworkPlugins/group/flannel/ControllerPod: waiting 10m0s for pods matching "app=flannel" in namespace "kube-flannel" ...
helpers_test.go:344: "kube-flannel-ds-5sx6s" [ed1a33ec-eb40-474c-bf6d-0451eb683ee8] Running
net_test.go:120: (dbg) TestNetworkPlugins/group/flannel/ControllerPod: app=flannel healthy within 6.003903263s
--- PASS: TestNetworkPlugins/group/flannel/ControllerPod (6.01s)

                                                
                                    
TestNetworkPlugins/group/flannel/KubeletFlags (0.32s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p flannel-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/flannel/KubeletFlags (0.32s)

                                                
                                    
TestNetworkPlugins/group/flannel/NetCatPod (11.4s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context flannel-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/flannel/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-2j8gt" [01418dd0-6174-4ac4-9d9b-ac517b3ba4a7] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-2j8gt" [01418dd0-6174-4ac4-9d9b-ac517b3ba4a7] Running
net_test.go:163: (dbg) TestNetworkPlugins/group/flannel/NetCatPod: app=netcat healthy within 11.032852612s
--- PASS: TestNetworkPlugins/group/flannel/NetCatPod (11.40s)

                                                
                                    
TestNetworkPlugins/group/flannel/DNS (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/DNS
net_test.go:175: (dbg) Run:  kubectl --context flannel-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/flannel/DNS (0.21s)

                                                
                                    
TestNetworkPlugins/group/flannel/Localhost (0.25s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/Localhost
net_test.go:194: (dbg) Run:  kubectl --context flannel-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/flannel/Localhost (0.25s)

                                                
                                    
TestNetworkPlugins/group/flannel/HairPin (0.16s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/HairPin
net_test.go:264: (dbg) Run:  kubectl --context flannel-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/flannel/HairPin (0.16s)

                                                
                                    
TestNetworkPlugins/group/bridge/KubeletFlags (0.41s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/KubeletFlags
net_test.go:133: (dbg) Run:  out/minikube-linux-arm64 ssh -p bridge-104147 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/bridge/KubeletFlags (0.41s)

                                                
                                    
TestNetworkPlugins/group/bridge/NetCatPod (10.49s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context bridge-104147 replace --force -f testdata/netcat-deployment.yaml
net_test.go:163: (dbg) TestNetworkPlugins/group/bridge/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:344: "netcat-6bc787d567-7g4n9" [a40f1c62-ae47-486a-a89c-2cb7c78d1307] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:344: "netcat-6bc787d567-7g4n9" [a40f1c62-ae47-486a-a89c-2cb7c78d1307] Running
E0704 02:33:48.205315 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.210570 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.220818 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.241061 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.281332 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.361604 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.521993 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:48.843128 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:49.483803 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:33:50.764436 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
net_test.go:163: (dbg) TestNetworkPlugins/group/bridge/NetCatPod: app=netcat healthy within 10.00440369s
--- PASS: TestNetworkPlugins/group/bridge/NetCatPod (10.49s)
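
Note: the interleaved "E0704 ... cert_rotation.go:168" lines here and below come from client-go's certificate-rotation watcher in the shared test process; they most likely fire because earlier profiles (auto-104147 and friends) were deleted while a watcher still referenced their client.crt. They are stale-profile noise, not failures of the running test, and the missing file is easy to confirm:
	ls /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt
	# expect: "No such file or directory", matching the watcher's error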

                                                
                                    
TestNetworkPlugins/group/bridge/DNS (0.28s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/DNS
net_test.go:175: (dbg) Run:  kubectl --context bridge-104147 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/bridge/DNS (0.28s)

                                                
                                    
TestNetworkPlugins/group/bridge/Localhost (0.2s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/Localhost
net_test.go:194: (dbg) Run:  kubectl --context bridge-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/bridge/Localhost (0.20s)

                                                
                                    
TestNetworkPlugins/group/bridge/HairPin (0.2s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/HairPin
net_test.go:264: (dbg) Run:  kubectl --context bridge-104147 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/bridge/HairPin (0.20s)
E0704 02:48:41.541507 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:48:48.205400 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:49:41.024457 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:49:41.788252 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory

                                                
                                    
TestStartStop/group/old-k8s-version/serial/FirstStart (182.43s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/FirstStart
start_stop_delete_test.go:186: (dbg) Run:  out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.20.0
E0704 02:33:58.445365 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:34:08.686024 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
start_stop_delete_test.go:186: (dbg) Done: out/minikube-linux-arm64 start -p old-k8s-version-610521 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.20.0: (3m2.429201772s)
--- PASS: TestStartStop/group/old-k8s-version/serial/FirstStart (182.43s)

                                                
                                    
TestStartStop/group/no-preload/serial/FirstStart (72.92s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/FirstStart
start_stop_delete_test.go:186: (dbg) Run:  out/minikube-linux-arm64 start -p no-preload-434675 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:34:29.166238 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:34:41.788772 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:41.794076 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:41.804364 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:41.824779 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:41.865894 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:41.946266 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:42.107290 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:42.427911 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:43.068745 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:44.349392 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:46.909694 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:34:52.029976 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:35:02.270282 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:35:10.126686 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:35:22.750709 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
start_stop_delete_test.go:186: (dbg) Done: out/minikube-linux-arm64 start -p no-preload-434675 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (1m12.917023877s)
--- PASS: TestStartStop/group/no-preload/serial/FirstStart (72.92s)

                                                
                                    
TestStartStop/group/no-preload/serial/DeployApp (9.35s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/DeployApp
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context no-preload-434675 create -f testdata/busybox.yaml
start_stop_delete_test.go:196: (dbg) TestStartStop/group/no-preload/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [89f64539-d85a-44fa-b12a-847a0d28833d] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "busybox" [89f64539-d85a-44fa-b12a-847a0d28833d] Running
E0704 02:35:36.273857 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.279152 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.289453 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.309792 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.350118 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.431111 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.591522 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:36.912063 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:37.552842 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
start_stop_delete_test.go:196: (dbg) TestStartStop/group/no-preload/serial/DeployApp: integration-test=busybox healthy within 9.003188834s
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context no-preload-434675 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/no-preload/serial/DeployApp (9.35s)

                                                
                                    
TestStartStop/group/no-preload/serial/EnableAddonWhileActive (1.12s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/EnableAddonWhileActive
start_stop_delete_test.go:205: (dbg) Run:  out/minikube-linux-arm64 addons enable metrics-server -p no-preload-434675 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
E0704 02:35:38.833613 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
start_stop_delete_test.go:205: (dbg) Done: out/minikube-linux-arm64 addons enable metrics-server -p no-preload-434675 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.020082474s)
start_stop_delete_test.go:215: (dbg) Run:  kubectl --context no-preload-434675 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/no-preload/serial/EnableAddonWhileActive (1.12s)

                                                
                                    
TestStartStop/group/no-preload/serial/Stop (12s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/Stop
start_stop_delete_test.go:228: (dbg) Run:  out/minikube-linux-arm64 stop -p no-preload-434675 --alsologtostderr -v=3
E0704 02:35:41.394137 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:35:46.515221 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
start_stop_delete_test.go:228: (dbg) Done: out/minikube-linux-arm64 stop -p no-preload-434675 --alsologtostderr -v=3: (12.004395559s)
--- PASS: TestStartStop/group/no-preload/serial/Stop (12.00s)

                                                
                                    
TestStartStop/group/no-preload/serial/EnableAddonAfterStop (0.19s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/EnableAddonAfterStop
start_stop_delete_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p no-preload-434675 -n no-preload-434675
start_stop_delete_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p no-preload-434675 -n no-preload-434675: exit status 7 (71.576064ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
start_stop_delete_test.go:239: status error: exit status 7 (may be ok)
start_stop_delete_test.go:246: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p no-preload-434675 --images=MetricsScraper=registry.k8s.io/echoserver:1.4
--- PASS: TestStartStop/group/no-preload/serial/EnableAddonAfterStop (0.19s)
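
Note: "minikube status" deliberately exits non-zero when the host is down (exit status 7 with "Stopped" here), which is why the test records it as "may be ok". The point of this subtest is that addon configuration can still be changed against a stopped profile:
	out/minikube-linux-arm64 status --format={{.Host}} -p no-preload-434675 -n no-preload-434675   # prints "Stopped", exits 7
	out/minikube-linux-arm64 addons enable dashboard -p no-preload-434675 --images=MetricsScraper=registry.k8s.io/echoserver:1.4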

                                                
                                    
TestStartStop/group/no-preload/serial/SecondStart (289.05s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/SecondStart
start_stop_delete_test.go:256: (dbg) Run:  out/minikube-linux-arm64 start -p no-preload-434675 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:35:53.144193 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 02:35:56.755872 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:36:03.710860 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
E0704 02:36:17.236169 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:36:27.800807 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:27.806047 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:27.816292 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:27.836525 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:27.876754 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:27.957019 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:28.117373 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:28.437853 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:29.078720 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:30.359521 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:32.047033 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/auto-104147/client.crt: no such file or directory
E0704 02:36:32.919710 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:38.040219 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:48.280527 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:36:51.648648 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
start_stop_delete_test.go:256: (dbg) Done: out/minikube-linux-arm64 start -p no-preload-434675 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (4m48.694982429s)
start_stop_delete_test.go:262: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p no-preload-434675 -n no-preload-434675
--- PASS: TestStartStop/group/no-preload/serial/SecondStart (289.05s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/DeployApp (7.54s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/DeployApp
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context old-k8s-version-610521 create -f testdata/busybox.yaml
start_stop_delete_test.go:196: (dbg) TestStartStop/group/old-k8s-version/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [22f23f79-67d3-478e-ab60-8837e3505b32] Pending
helpers_test.go:344: "busybox" [22f23f79-67d3-478e-ab60-8837e3505b32] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
E0704 02:36:58.196670 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
helpers_test.go:344: "busybox" [22f23f79-67d3-478e-ab60-8837e3505b32] Running
start_stop_delete_test.go:196: (dbg) TestStartStop/group/old-k8s-version/serial/DeployApp: integration-test=busybox healthy within 7.003859887s
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context old-k8s-version-610521 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/old-k8s-version/serial/DeployApp (7.54s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive (1.14s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive
start_stop_delete_test.go:205: (dbg) Run:  out/minikube-linux-arm64 addons enable metrics-server -p old-k8s-version-610521 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:205: (dbg) Done: out/minikube-linux-arm64 addons enable metrics-server -p old-k8s-version-610521 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.01982564s)
start_stop_delete_test.go:215: (dbg) Run:  kubectl --context old-k8s-version-610521 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive (1.14s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/Stop (12.11s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/Stop
start_stop_delete_test.go:228: (dbg) Run:  out/minikube-linux-arm64 stop -p old-k8s-version-610521 --alsologtostderr -v=3
E0704 02:37:08.760784 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:37:16.150468 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.155797 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.166079 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.186411 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.226796 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.307240 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.467742 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:16.788559 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:37:17.429194 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
start_stop_delete_test.go:228: (dbg) Done: out/minikube-linux-arm64 stop -p old-k8s-version-610521 --alsologtostderr -v=3: (12.107935997s)
--- PASS: TestStartStop/group/old-k8s-version/serial/Stop (12.11s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop (0.19s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop
start_stop_delete_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p old-k8s-version-610521 -n old-k8s-version-610521
start_stop_delete_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p old-k8s-version-610521 -n old-k8s-version-610521: exit status 7 (69.424019ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
start_stop_delete_test.go:239: status error: exit status 7 (may be ok)
start_stop_delete_test.go:246: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p old-k8s-version-610521 --images=MetricsScraper=registry.k8s.io/echoserver:1.4
--- PASS: TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop (0.19s)

                                                
                                    
TestStartStop/group/no-preload/serial/UserAppExistsAfterStop (6.01s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/UserAppExistsAfterStop
start_stop_delete_test.go:274: (dbg) TestStartStop/group/no-preload/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-hfz2b" [f2b03b14-3e63-4255-a4ea-b48deb59c84c] Running
start_stop_delete_test.go:274: (dbg) TestStartStop/group/no-preload/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.004326049s
--- PASS: TestStartStop/group/no-preload/serial/UserAppExistsAfterStop (6.01s)

TestStartStop/group/no-preload/serial/AddonExistsAfterStop (6.13s)

=== RUN   TestStartStop/group/no-preload/serial/AddonExistsAfterStop
start_stop_delete_test.go:287: (dbg) TestStartStop/group/no-preload/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-hfz2b" [f2b03b14-3e63-4255-a4ea-b48deb59c84c] Running
start_stop_delete_test.go:287: (dbg) TestStartStop/group/no-preload/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.005120853s
start_stop_delete_test.go:291: (dbg) Run:  kubectl --context no-preload-434675 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/no-preload/serial/AddonExistsAfterStop (6.13s)

TestStartStop/group/no-preload/serial/VerifyKubernetesImages (0.25s)

=== RUN   TestStartStop/group/no-preload/serial/VerifyKubernetesImages
start_stop_delete_test.go:304: (dbg) Run:  out/minikube-linux-arm64 -p no-preload-434675 image list --format=json
E0704 02:40:53.143638 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240513-cd2ac642
start_stop_delete_test.go:304: Found non-minikube image: gcr.io/k8s-minikube/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/no-preload/serial/VerifyKubernetesImages (0.25s)
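To eyeball the same inventory manually, the JSON the test parses can be pretty-printed (a sketch; jq is assumed to be installed, and the JSON shape may differ across minikube versions):

	out/minikube-linux-arm64 -p no-preload-434675 image list --format=json | jq .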

TestStartStop/group/no-preload/serial/Pause (3.14s)

=== RUN   TestStartStop/group/no-preload/serial/Pause
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 pause -p no-preload-434675 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p no-preload-434675 -n no-preload-434675
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.APIServer}} -p no-preload-434675 -n no-preload-434675: exit status 2 (315.263756ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p no-preload-434675 -n no-preload-434675
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Kubelet}} -p no-preload-434675 -n no-preload-434675: exit status 2 (324.978719ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 unpause -p no-preload-434675 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p no-preload-434675 -n no-preload-434675
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p no-preload-434675 -n no-preload-434675
E0704 02:40:56.163835 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
--- PASS: TestStartStop/group/no-preload/serial/Pause (3.14s)
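The pause cycle reduces to four commands; a minimal replay against this run's profile, with expected results taken from the stdout captures above (status exits 2 while components are paused):

	out/minikube-linux-arm64 pause -p no-preload-434675 --alsologtostderr -v=1
	out/minikube-linux-arm64 status --format={{.APIServer}} -p no-preload-434675 -n no-preload-434675  # "Paused", exit 2
	out/minikube-linux-arm64 status --format={{.Kubelet}} -p no-preload-434675 -n no-preload-434675    # "Stopped", exit 2
	out/minikube-linux-arm64 unpause -p no-preload-434675 --alsologtostderr -v=1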

TestStartStop/group/embed-certs/serial/FirstStart (84.58s)

=== RUN   TestStartStop/group/embed-certs/serial/FirstStart
start_stop_delete_test.go:186: (dbg) Run:  out/minikube-linux-arm64 start -p embed-certs-430955 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:41:03.957138 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:41:25.381774 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:41:27.801614 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:41:51.649538 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 02:41:55.481850 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:42:16.151112 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
start_stop_delete_test.go:186: (dbg) Done: out/minikube-linux-arm64 start -p embed-certs-430955 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (1m24.581519996s)
--- PASS: TestStartStop/group/embed-certs/serial/FirstStart (84.58s)
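The FirstStart invocation, reflowed for readability (flags verbatim from the log; the comment is interpretation, not log output):

	# --embed-certs inlines the client certificates into kubeconfig rather
	# than referencing them as file paths on disk.
	out/minikube-linux-arm64 start -p embed-certs-430955 --memory=2200 --alsologtostderr \
	  --wait=true --embed-certs --driver=docker --container-runtime=containerd \
	  --kubernetes-version=v1.30.2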

TestStartStop/group/embed-certs/serial/DeployApp (9.4s)

=== RUN   TestStartStop/group/embed-certs/serial/DeployApp
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context embed-certs-430955 create -f testdata/busybox.yaml
start_stop_delete_test.go:196: (dbg) TestStartStop/group/embed-certs/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [3740d10e-f614-42bb-bced-df3b58949077] Pending
helpers_test.go:344: "busybox" [3740d10e-f614-42bb-bced-df3b58949077] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "busybox" [3740d10e-f614-42bb-bced-df3b58949077] Running
start_stop_delete_test.go:196: (dbg) TestStartStop/group/embed-certs/serial/DeployApp: integration-test=busybox healthy within 9.00409843s
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context embed-certs-430955 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/embed-certs/serial/DeployApp (9.40s)
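The deploy step is plain kubectl against the fresh profile; in this sketch, kubectl wait stands in for the test's own polling helper (testdata/busybox.yaml is the manifest from minikube's integration-test tree):

	kubectl --context embed-certs-430955 create -f testdata/busybox.yaml
	kubectl --context embed-certs-430955 wait --for=condition=ready pod -l integration-test=busybox --timeout=8m
	kubectl --context embed-certs-430955 exec busybox -- /bin/sh -c "ulimit -n"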

TestStartStop/group/embed-certs/serial/EnableAddonWhileActive (1.18s)

=== RUN   TestStartStop/group/embed-certs/serial/EnableAddonWhileActive
start_stop_delete_test.go:205: (dbg) Run:  out/minikube-linux-arm64 addons enable metrics-server -p embed-certs-430955 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:205: (dbg) Done: out/minikube-linux-arm64 addons enable metrics-server -p embed-certs-430955 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.059657911s)
start_stop_delete_test.go:215: (dbg) Run:  kubectl --context embed-certs-430955 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/embed-certs/serial/EnableAddonWhileActive (1.18s)
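Addon enablement with image overrides can be replayed directly; the --images/--registries pair points metrics-server at a stand-in image on a fake registry, which is why the test only describes the deployment rather than scraping metrics (commands copied from this run):

	out/minikube-linux-arm64 addons enable metrics-server -p embed-certs-430955 \
	  --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
	kubectl --context embed-certs-430955 describe deploy/metrics-server -n kube-system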

TestStartStop/group/embed-certs/serial/Stop (12.06s)

=== RUN   TestStartStop/group/embed-certs/serial/Stop
start_stop_delete_test.go:228: (dbg) Run:  out/minikube-linux-arm64 stop -p embed-certs-430955 --alsologtostderr -v=3
E0704 02:42:43.834158 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
start_stop_delete_test.go:228: (dbg) Done: out/minikube-linux-arm64 stop -p embed-certs-430955 --alsologtostderr -v=3: (12.058654359s)
--- PASS: TestStartStop/group/embed-certs/serial/Stop (12.06s)

TestStartStop/group/embed-certs/serial/EnableAddonAfterStop (0.19s)

=== RUN   TestStartStop/group/embed-certs/serial/EnableAddonAfterStop
start_stop_delete_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p embed-certs-430955 -n embed-certs-430955
start_stop_delete_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p embed-certs-430955 -n embed-certs-430955: exit status 7 (77.460049ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:239: status error: exit status 7 (may be ok)
start_stop_delete_test.go:246: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p embed-certs-430955 --images=MetricsScraper=registry.k8s.io/echoserver:1.4
--- PASS: TestStartStop/group/embed-certs/serial/EnableAddonAfterStop (0.19s)

TestStartStop/group/embed-certs/serial/SecondStart (267.25s)

=== RUN   TestStartStop/group/embed-certs/serial/SecondStart
start_stop_delete_test.go:256: (dbg) Run:  out/minikube-linux-arm64 start -p embed-certs-430955 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:43:12.321367 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
start_stop_delete_test.go:256: (dbg) Done: out/minikube-linux-arm64 start -p embed-certs-430955 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (4m26.867494941s)
start_stop_delete_test.go:262: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p embed-certs-430955 -n embed-certs-430955
--- PASS: TestStartStop/group/embed-certs/serial/SecondStart (267.25s)

TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop (6.01s)

=== RUN   TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop
start_stop_delete_test.go:274: (dbg) TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-cd95d586-xnqcr" [56f06da0-0fb7-4b2b-8438-0ec06620b2c9] Running
start_stop_delete_test.go:274: (dbg) TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.003524021s
--- PASS: TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop (6.01s)

TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop (6.11s)

=== RUN   TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop
start_stop_delete_test.go:287: (dbg) TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-cd95d586-xnqcr" [56f06da0-0fb7-4b2b-8438-0ec06620b2c9] Running
E0704 02:43:40.013642 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:43:41.541786 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
start_stop_delete_test.go:287: (dbg) TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.003395345s
start_stop_delete_test.go:291: (dbg) Run:  kubectl --context old-k8s-version-610521 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop (6.11s)

TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages (0.26s)

=== RUN   TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages
start_stop_delete_test.go:304: (dbg) Run:  out/minikube-linux-arm64 -p old-k8s-version-610521 image list --format=json
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240202-8f1494ea
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240513-cd2ac642
start_stop_delete_test.go:304: Found non-minikube image: gcr.io/k8s-minikube/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages (0.26s)

TestStartStop/group/old-k8s-version/serial/Pause (3.11s)

=== RUN   TestStartStop/group/old-k8s-version/serial/Pause
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 pause -p old-k8s-version-610521 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p old-k8s-version-610521 -n old-k8s-version-610521
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.APIServer}} -p old-k8s-version-610521 -n old-k8s-version-610521: exit status 2 (324.731294ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p old-k8s-version-610521 -n old-k8s-version-610521
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Kubelet}} -p old-k8s-version-610521 -n old-k8s-version-610521: exit status 2 (316.334545ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 unpause -p old-k8s-version-610521 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p old-k8s-version-610521 -n old-k8s-version-610521
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p old-k8s-version-610521 -n old-k8s-version-610521
--- PASS: TestStartStop/group/old-k8s-version/serial/Pause (3.11s)

TestStartStop/group/default-k8s-diff-port/serial/FirstStart (65.06s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/FirstStart
start_stop_delete_test.go:186: (dbg) Run:  out/minikube-linux-arm64 start -p default-k8s-diff-port-052538 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:44:09.222574 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/bridge-104147/client.crt: no such file or directory
E0704 02:44:41.788036 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/kindnet-104147/client.crt: no such file or directory
start_stop_delete_test.go:186: (dbg) Done: out/minikube-linux-arm64 start -p default-k8s-diff-port-052538 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (1m5.058516257s)
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/FirstStart (65.06s)
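The only deviation from the other profiles is --apiserver-port=8444 in place of minikube's default 8443; a quick manual confirmation after start (a sketch; cluster-info output formatting varies by kubectl version):

	out/minikube-linux-arm64 start -p default-k8s-diff-port-052538 --memory=2200 --wait=true \
	  --apiserver-port=8444 --driver=docker --container-runtime=containerd --kubernetes-version=v1.30.2
	kubectl --context default-k8s-diff-port-052538 cluster-info  # server URL should end in :8444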

TestStartStop/group/default-k8s-diff-port/serial/DeployApp (9.4s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/DeployApp
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context default-k8s-diff-port-052538 create -f testdata/busybox.yaml
start_stop_delete_test.go:196: (dbg) TestStartStop/group/default-k8s-diff-port/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [8ae397aa-25d7-4381-b449-ae8f84191b7b] Pending
helpers_test.go:344: "busybox" [8ae397aa-25d7-4381-b449-ae8f84191b7b] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "busybox" [8ae397aa-25d7-4381-b449-ae8f84191b7b] Running
start_stop_delete_test.go:196: (dbg) TestStartStop/group/default-k8s-diff-port/serial/DeployApp: integration-test=busybox healthy within 9.003422796s
start_stop_delete_test.go:196: (dbg) Run:  kubectl --context default-k8s-diff-port-052538 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/DeployApp (9.40s)

TestStartStop/group/default-k8s-diff-port/serial/EnableAddonWhileActive (1.14s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/EnableAddonWhileActive
start_stop_delete_test.go:205: (dbg) Run:  out/minikube-linux-arm64 addons enable metrics-server -p default-k8s-diff-port-052538 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:205: (dbg) Done: out/minikube-linux-arm64 addons enable metrics-server -p default-k8s-diff-port-052538 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.03432091s)
start_stop_delete_test.go:215: (dbg) Run:  kubectl --context default-k8s-diff-port-052538 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/EnableAddonWhileActive (1.14s)

TestStartStop/group/default-k8s-diff-port/serial/Stop (12.06s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/Stop
start_stop_delete_test.go:228: (dbg) Run:  out/minikube-linux-arm64 stop -p default-k8s-diff-port-052538 --alsologtostderr -v=3
start_stop_delete_test.go:228: (dbg) Done: out/minikube-linux-arm64 stop -p default-k8s-diff-port-052538 --alsologtostderr -v=3: (12.059664888s)
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/Stop (12.06s)

TestStartStop/group/default-k8s-diff-port/serial/EnableAddonAfterStop (0.19s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/EnableAddonAfterStop
start_stop_delete_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
start_stop_delete_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538: exit status 7 (64.609193ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:239: status error: exit status 7 (may be ok)
start_stop_delete_test.go:246: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p default-k8s-diff-port-052538 --images=MetricsScraper=registry.k8s.io/echoserver:1.4
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/EnableAddonAfterStop (0.19s)

TestStartStop/group/default-k8s-diff-port/serial/SecondStart (265.97s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/SecondStart
start_stop_delete_test.go:256: (dbg) Run:  out/minikube-linux-arm64 start -p default-k8s-diff-port-052538 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:45:29.424644 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.429916 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.440168 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.460414 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.500664 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.580915 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:29.741286 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:30.061856 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:30.702955 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:31.983114 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:34.544054 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:36.274014 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/calico-104147/client.crt: no such file or directory
E0704 02:45:39.664987 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:49.906030 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:45:53.144150 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/addons-155517/client.crt: no such file or directory
E0704 02:46:10.386224 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:46:27.800909 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/custom-flannel-104147/client.crt: no such file or directory
E0704 02:46:34.694649 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 02:46:51.346442 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
E0704 02:46:51.649000 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/functional-781779/client.crt: no such file or directory
E0704 02:46:57.179859 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.185166 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.195431 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.215871 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.256195 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.336485 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.496950 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:57.817114 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:58.458047 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:46:59.739051 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:47:02.299980 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:47:07.420636 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
start_stop_delete_test.go:256: (dbg) Done: out/minikube-linux-arm64 start -p default-k8s-diff-port-052538 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (4m25.627003645s)
start_stop_delete_test.go:262: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/SecondStart (265.97s)

TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop (6.01s)

=== RUN   TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop
start_stop_delete_test.go:274: (dbg) TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-sq7pn" [6d77abac-48ff-4c55-aabd-2c2ff857c91c] Running
E0704 02:47:16.151049 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/enable-default-cni-104147/client.crt: no such file or directory
E0704 02:47:17.660796 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
start_stop_delete_test.go:274: (dbg) TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.003791752s
--- PASS: TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop (6.01s)

TestStartStop/group/embed-certs/serial/AddonExistsAfterStop (5.1s)

=== RUN   TestStartStop/group/embed-certs/serial/AddonExistsAfterStop
start_stop_delete_test.go:287: (dbg) TestStartStop/group/embed-certs/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-sq7pn" [6d77abac-48ff-4c55-aabd-2c2ff857c91c] Running
start_stop_delete_test.go:287: (dbg) TestStartStop/group/embed-certs/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.004069629s
start_stop_delete_test.go:291: (dbg) Run:  kubectl --context embed-certs-430955 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/embed-certs/serial/AddonExistsAfterStop (5.10s)

TestStartStop/group/embed-certs/serial/VerifyKubernetesImages (0.25s)

=== RUN   TestStartStop/group/embed-certs/serial/VerifyKubernetesImages
start_stop_delete_test.go:304: (dbg) Run:  out/minikube-linux-arm64 -p embed-certs-430955 image list --format=json
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240513-cd2ac642
start_stop_delete_test.go:304: Found non-minikube image: gcr.io/k8s-minikube/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/embed-certs/serial/VerifyKubernetesImages (0.25s)

TestStartStop/group/embed-certs/serial/Pause (3.26s)

=== RUN   TestStartStop/group/embed-certs/serial/Pause
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 pause -p embed-certs-430955 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p embed-certs-430955 -n embed-certs-430955
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.APIServer}} -p embed-certs-430955 -n embed-certs-430955: exit status 2 (323.140513ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p embed-certs-430955 -n embed-certs-430955
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Kubelet}} -p embed-certs-430955 -n embed-certs-430955: exit status 2 (328.81798ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 unpause -p embed-certs-430955 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p embed-certs-430955 -n embed-certs-430955
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p embed-certs-430955 -n embed-certs-430955
--- PASS: TestStartStop/group/embed-certs/serial/Pause (3.26s)

TestStartStop/group/newest-cni/serial/FirstStart (44.12s)

=== RUN   TestStartStop/group/newest-cni/serial/FirstStart
start_stop_delete_test.go:186: (dbg) Run:  out/minikube-linux-arm64 start -p newest-cni-308029 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubeadm.pod-network-cidr=10.42.0.0/16 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:47:38.142903 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
E0704 02:48:12.321155 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/flannel-104147/client.crt: no such file or directory
E0704 02:48:13.266824 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/no-preload-434675/client.crt: no such file or directory
start_stop_delete_test.go:186: (dbg) Done: out/minikube-linux-arm64 start -p newest-cni-308029 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubeadm.pod-network-cidr=10.42.0.0/16 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (44.12418915s)
--- PASS: TestStartStop/group/newest-cni/serial/FirstStart (44.12s)
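This profile exercises the CNI plumbing; the start flags, reflowed with explanatory comments (the comments are interpretation, not log output):

	# --network-plugin=cni defers pod networking to a CNI plugin, the kubeadm
	# extra-config sets the pod CIDR that plugin would serve, and
	# --feature-gates toggles ServerSideApply on the control plane.
	out/minikube-linux-arm64 start -p newest-cni-308029 --memory=2200 --alsologtostderr \
	  --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true \
	  --network-plugin=cni --extra-config=kubeadm.pod-network-cidr=10.42.0.0/16 \
	  --driver=docker --container-runtime=containerd --kubernetes-version=v1.30.2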

TestStartStop/group/newest-cni/serial/DeployApp (0s)

=== RUN   TestStartStop/group/newest-cni/serial/DeployApp
--- PASS: TestStartStop/group/newest-cni/serial/DeployApp (0.00s)

TestStartStop/group/newest-cni/serial/EnableAddonWhileActive (1.18s)

=== RUN   TestStartStop/group/newest-cni/serial/EnableAddonWhileActive
start_stop_delete_test.go:205: (dbg) Run:  out/minikube-linux-arm64 addons enable metrics-server -p newest-cni-308029 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:205: (dbg) Done: out/minikube-linux-arm64 addons enable metrics-server -p newest-cni-308029 --images=MetricsServer=registry.k8s.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.181074792s)
start_stop_delete_test.go:211: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/EnableAddonWhileActive (1.18s)

TestStartStop/group/newest-cni/serial/Stop (1.25s)

=== RUN   TestStartStop/group/newest-cni/serial/Stop
start_stop_delete_test.go:228: (dbg) Run:  out/minikube-linux-arm64 stop -p newest-cni-308029 --alsologtostderr -v=3
start_stop_delete_test.go:228: (dbg) Done: out/minikube-linux-arm64 stop -p newest-cni-308029 --alsologtostderr -v=3: (1.253664887s)
--- PASS: TestStartStop/group/newest-cni/serial/Stop (1.25s)

TestStartStop/group/newest-cni/serial/EnableAddonAfterStop (0.18s)

=== RUN   TestStartStop/group/newest-cni/serial/EnableAddonAfterStop
start_stop_delete_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p newest-cni-308029 -n newest-cni-308029
start_stop_delete_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p newest-cni-308029 -n newest-cni-308029: exit status 7 (70.363872ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:239: status error: exit status 7 (may be ok)
start_stop_delete_test.go:246: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p newest-cni-308029 --images=MetricsScraper=registry.k8s.io/echoserver:1.4
--- PASS: TestStartStop/group/newest-cni/serial/EnableAddonAfterStop (0.18s)

TestStartStop/group/newest-cni/serial/SecondStart (15.68s)

=== RUN   TestStartStop/group/newest-cni/serial/SecondStart
start_stop_delete_test.go:256: (dbg) Run:  out/minikube-linux-arm64 start -p newest-cni-308029 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubeadm.pod-network-cidr=10.42.0.0/16 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2
E0704 02:48:19.103969 1195688 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/old-k8s-version-610521/client.crt: no such file or directory
start_stop_delete_test.go:256: (dbg) Done: out/minikube-linux-arm64 start -p newest-cni-308029 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubeadm.pod-network-cidr=10.42.0.0/16 --driver=docker  --container-runtime=containerd --kubernetes-version=v1.30.2: (15.335508828s)
start_stop_delete_test.go:262: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p newest-cni-308029 -n newest-cni-308029
--- PASS: TestStartStop/group/newest-cni/serial/SecondStart (15.68s)

TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop (0s)

=== RUN   TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop
start_stop_delete_test.go:273: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop (0.00s)

TestStartStop/group/newest-cni/serial/AddonExistsAfterStop (0s)

=== RUN   TestStartStop/group/newest-cni/serial/AddonExistsAfterStop
start_stop_delete_test.go:284: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/AddonExistsAfterStop (0.00s)

TestStartStop/group/newest-cni/serial/VerifyKubernetesImages (0.25s)

=== RUN   TestStartStop/group/newest-cni/serial/VerifyKubernetesImages
start_stop_delete_test.go:304: (dbg) Run:  out/minikube-linux-arm64 -p newest-cni-308029 image list --format=json
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240513-cd2ac642
--- PASS: TestStartStop/group/newest-cni/serial/VerifyKubernetesImages (0.25s)

TestStartStop/group/newest-cni/serial/Pause (3s)

=== RUN   TestStartStop/group/newest-cni/serial/Pause
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 pause -p newest-cni-308029 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p newest-cni-308029 -n newest-cni-308029
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.APIServer}} -p newest-cni-308029 -n newest-cni-308029: exit status 2 (339.168831ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p newest-cni-308029 -n newest-cni-308029
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Kubelet}} -p newest-cni-308029 -n newest-cni-308029: exit status 2 (350.087386ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 unpause -p newest-cni-308029 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p newest-cni-308029 -n newest-cni-308029
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p newest-cni-308029 -n newest-cni-308029
--- PASS: TestStartStop/group/newest-cni/serial/Pause (3.00s)

TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop (6.01s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop
start_stop_delete_test.go:274: (dbg) TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-fwk4v" [5487f442-35a5-4ec9-a83a-1ebd0e5f2b48] Running
start_stop_delete_test.go:274: (dbg) TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.004167314s
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/UserAppExistsAfterStop (6.01s)

TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop (5.09s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop
start_stop_delete_test.go:287: (dbg) TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:344: "kubernetes-dashboard-779776cb65-fwk4v" [5487f442-35a5-4ec9-a83a-1ebd0e5f2b48] Running
start_stop_delete_test.go:287: (dbg) TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.003857503s
start_stop_delete_test.go:291: (dbg) Run:  kubectl --context default-k8s-diff-port-052538 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/AddonExistsAfterStop (5.09s)

TestStartStop/group/default-k8s-diff-port/serial/VerifyKubernetesImages (0.25s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/VerifyKubernetesImages
start_stop_delete_test.go:304: (dbg) Run:  out/minikube-linux-arm64 -p default-k8s-diff-port-052538 image list --format=json
start_stop_delete_test.go:304: Found non-minikube image: gcr.io/k8s-minikube/busybox:1.28.4-glibc
start_stop_delete_test.go:304: Found non-minikube image: kindest/kindnetd:v20240513-cd2ac642
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/VerifyKubernetesImages (0.25s)

TestStartStop/group/default-k8s-diff-port/serial/Pause (3.04s)

=== RUN   TestStartStop/group/default-k8s-diff-port/serial/Pause
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 pause -p default-k8s-diff-port-052538 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.APIServer}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538: exit status 2 (317.621821ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
start_stop_delete_test.go:311: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Kubelet}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538: exit status 2 (306.753061ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:311: status error: exit status 2 (may be ok)
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 unpause -p default-k8s-diff-port-052538 --alsologtostderr -v=1
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
start_stop_delete_test.go:311: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Kubelet}} -p default-k8s-diff-port-052538 -n default-k8s-diff-port-052538
--- PASS: TestStartStop/group/default-k8s-diff-port/serial/Pause (3.04s)

Test skip (28/328)

TestDownloadOnly/v1.20.0/cached-images (0s)

=== RUN   TestDownloadOnly/v1.20.0/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.20.0/cached-images (0.00s)

TestDownloadOnly/v1.20.0/binaries (0s)

=== RUN   TestDownloadOnly/v1.20.0/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.20.0/binaries (0.00s)

TestDownloadOnly/v1.20.0/kubectl (0s)

=== RUN   TestDownloadOnly/v1.20.0/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.20.0/kubectl (0.00s)

TestDownloadOnly/v1.30.2/cached-images (0s)

=== RUN   TestDownloadOnly/v1.30.2/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.30.2/cached-images (0.00s)

TestDownloadOnly/v1.30.2/binaries (0s)

=== RUN   TestDownloadOnly/v1.30.2/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.30.2/binaries (0.00s)

TestDownloadOnly/v1.30.2/kubectl (0s)

=== RUN   TestDownloadOnly/v1.30.2/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.30.2/kubectl (0.00s)

TestDownloadOnlyKic (0.56s)

=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:232: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p download-docker-898650 --alsologtostderr --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:244: Skip for arm64 platform. See https://github.com/kubernetes/minikube/issues/10144
helpers_test.go:175: Cleaning up "download-docker-898650" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p download-docker-898650
--- SKIP: TestDownloadOnlyKic (0.56s)

TestOffline (0s)

=== RUN   TestOffline
=== PAUSE TestOffline
=== CONT  TestOffline
aab_offline_test.go:35: skipping TestOffline - only docker runtime supported on arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestOffline (0.00s)

TestAddons/parallel/HelmTiller (0s)

=== RUN   TestAddons/parallel/HelmTiller
=== PAUSE TestAddons/parallel/HelmTiller
=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:446: skip Helm test on arm64
--- SKIP: TestAddons/parallel/HelmTiller (0.00s)

TestAddons/parallel/Olm (0s)

=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm
=== CONT  TestAddons/parallel/Olm
addons_test.go:500: Skipping OLM addon test until https://github.com/operator-framework/operator-lifecycle-manager/issues/2534 is resolved
--- SKIP: TestAddons/parallel/Olm (0.00s)

TestDockerFlags (0s)

=== RUN   TestDockerFlags
docker_test.go:41: skipping: only runs with docker container runtime, currently testing containerd
--- SKIP: TestDockerFlags (0.00s)

TestKVMDriverInstallOrUpdate (0s)

=== RUN   TestKVMDriverInstallOrUpdate
driver_install_or_update_test.go:45: Skip if arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestKVMDriverInstallOrUpdate (0.00s)

TestHyperKitDriverInstallOrUpdate (0s)

=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:105: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

TestHyperkitDriverSkipUpgrade (0s)

=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:169: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

TestFunctional/parallel/MySQL (0s)

=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL

=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1783: arm64 is not supported by mysql. Skip the test. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestFunctional/parallel/MySQL (0.00s)

TestFunctional/parallel/DockerEnv (0s)

=== RUN   TestFunctional/parallel/DockerEnv
=== PAUSE TestFunctional/parallel/DockerEnv

=== CONT  TestFunctional/parallel/DockerEnv
functional_test.go:459: only validate docker env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/DockerEnv (0.00s)

TestFunctional/parallel/PodmanEnv (0s)

=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv

=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:546: only validate podman env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.00s)

TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

TestGvisorAddon (0s)

=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

TestImageBuild (0s)

=== RUN   TestImageBuild
image_test.go:33: 
--- SKIP: TestImageBuild (0.00s)

TestChangeNoneUser (0s)

=== RUN   TestChangeNoneUser
none_test.go:38: Test requires none driver and SUDO_USER env to not be empty
--- SKIP: TestChangeNoneUser (0.00s)
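
The skip above keys on two conditions at once: the selected driver and the SUDO_USER environment variable. A small illustrative sketch (MINIKUBE_DRIVER is an assumed stand-in for the harness's driver flag, not the real test code):

package sketch

import (
    "os"
    "testing"
)

func TestChangeNoneUserSketch(t *testing.T) {
    driver := os.Getenv("MINIKUBE_DRIVER") // assumption: the real tests read a command-line flag instead
    if driver != "none" || os.Getenv("SUDO_USER") == "" {
        t.Skip("Test requires none driver and SUDO_USER env to not be empty")
    }
    // ...a test body that exercises the none driver as the sudo user would follow...
}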

TestScheduledStopWindows (0s)

=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:42: test only runs on windows
--- SKIP: TestScheduledStopWindows (0.00s)

TestSkaffold (0s)

=== RUN   TestSkaffold
skaffold_test.go:45: skaffold requires docker-env, currently testing containerd container runtime
--- SKIP: TestSkaffold (0.00s)

TestNetworkPlugins/group/kubenet (3.46s)

=== RUN   TestNetworkPlugins/group/kubenet
net_test.go:93: Skipping the test as containerd container runtimes requires CNI
panic.go:626: 
----------------------- debugLogs start: kubenet-104147 [pass: true] --------------------------------
>>> netcat: nslookup kubernetes.default:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: nslookup debug kubernetes.default a-records:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: dig search kubernetes.default:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local udp/53:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local tcp/53:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: nc 10.96.0.10 udp/53:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: nc 10.96.0.10 tcp/53:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: /etc/nsswitch.conf:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: /etc/hosts:
Error in configuration: context was not found for specified context: kubenet-104147

>>> netcat: /etc/resolv.conf:
Error in configuration: context was not found for specified context: kubenet-104147

>>> host: /etc/nsswitch.conf:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/hosts:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/resolv.conf:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> k8s: nodes, services, endpoints, daemon sets, deployments and pods, :
Error in configuration: context was not found for specified context: kubenet-104147

>>> host: crictl pods:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: crictl containers:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> k8s: describe netcat deployment:
error: context "kubenet-104147" does not exist

>>> k8s: describe netcat pod(s):
error: context "kubenet-104147" does not exist

>>> k8s: netcat logs:
error: context "kubenet-104147" does not exist

>>> k8s: describe coredns deployment:
error: context "kubenet-104147" does not exist

>>> k8s: describe coredns pods:
error: context "kubenet-104147" does not exist

>>> k8s: coredns logs:
error: context "kubenet-104147" does not exist

>>> k8s: describe api server pod(s):
error: context "kubenet-104147" does not exist

>>> k8s: api server logs:
error: context "kubenet-104147" does not exist

>>> host: /etc/cni:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: ip a s:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: ip r s:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: iptables-save:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: iptables table nat:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> k8s: describe kube-proxy daemon set:
error: context "kubenet-104147" does not exist

>>> k8s: describe kube-proxy pod(s):
error: context "kubenet-104147" does not exist

>>> k8s: kube-proxy logs:
error: context "kubenet-104147" does not exist

>>> host: kubelet daemon status:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: kubelet daemon config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> k8s: kubelet logs:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/kubernetes/kubelet.conf:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /var/lib/kubelet/config.yaml:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> k8s: kubectl config:
apiVersion: v1
clusters:
- cluster:
    certificate-authority: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: cluster_info
    server: https://192.168.76.2:8443
  name: pause-736265
contexts:
- context:
    cluster: pause-736265
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: context_info
    namespace: default
    user: pause-736265
  name: pause-736265
current-context: pause-736265
kind: Config
preferences: {}
users:
- name: pause-736265
  user:
    client-certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.crt
    client-key: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.key

>>> k8s: cms:
Error in configuration: context was not found for specified context: kubenet-104147

>>> host: docker daemon status:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: docker daemon config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/docker/daemon.json:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: docker system info:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: cri-docker daemon status:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: cri-docker daemon config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/systemd/system/cri-docker.service.d/10-cni.conf:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /usr/lib/systemd/system/cri-docker.service:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: cri-dockerd version:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: containerd daemon status:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: containerd daemon config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /lib/systemd/system/containerd.service:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/containerd/config.toml:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: containerd config dump:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: crio daemon status:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: crio daemon config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: /etc/crio:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"

>>> host: crio config:
* Profile "kubenet-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p kubenet-104147"
----------------------- debugLogs end: kubenet-104147 [took: 3.305637843s] --------------------------------
helpers_test.go:175: Cleaning up "kubenet-104147" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p kubenet-104147
--- SKIP: TestNetworkPlugins/group/kubenet (3.46s)
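
Every kubectl probe in the debugLogs above fails with "context was not found" because this group is skipped before a kubenet cluster is ever started: the shared kubeconfig still points at the leftover pause-736265 context, as the ">>> k8s: kubectl config:" dump shows. A small client-go sketch (the kubeconfig path is an assumption; the CI run above uses a per-job kubeconfig instead) demonstrates how to detect that state programmatically:

package main

import (
    "fmt"
    "os"
    "path/filepath"

    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    home, _ := os.UserHomeDir()
    // Assumed default path; adjust for CI layouts like minikube-integration's.
    cfg, err := clientcmd.LoadFromFile(filepath.Join(home, ".kube", "config"))
    if err != nil {
        fmt.Fprintln(os.Stderr, "load kubeconfig:", err)
        os.Exit(1)
    }
    fmt.Println("current context:", cfg.CurrentContext) // pause-736265 in this run
    if _, ok := cfg.Contexts["kubenet-104147"]; !ok {
        // Exactly the state the debugLogs report: the profile's context was
        // never written, so every kubectl call against it fails.
        fmt.Println(`context "kubenet-104147" does not exist`)
    }
}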

TestNetworkPlugins/group/cilium (3.92s)

=== RUN   TestNetworkPlugins/group/cilium
net_test.go:102: Skipping the test as it's interfering with other tests and is outdated
panic.go:626: 
----------------------- debugLogs start: cilium-104147 [pass: true] --------------------------------
>>> netcat: nslookup kubernetes.default:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: nslookup debug kubernetes.default a-records:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: dig search kubernetes.default:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local udp/53:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: dig @10.96.0.10 kubernetes.default.svc.cluster.local tcp/53:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: nc 10.96.0.10 udp/53:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: nc 10.96.0.10 tcp/53:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: /etc/nsswitch.conf:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: /etc/hosts:
Error in configuration: context was not found for specified context: cilium-104147

>>> netcat: /etc/resolv.conf:
Error in configuration: context was not found for specified context: cilium-104147

>>> host: /etc/nsswitch.conf:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/hosts:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/resolv.conf:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> k8s: nodes, services, endpoints, daemon sets, deployments and pods, :
Error in configuration: context was not found for specified context: cilium-104147

>>> host: crictl pods:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: crictl containers:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> k8s: describe netcat deployment:
error: context "cilium-104147" does not exist

>>> k8s: describe netcat pod(s):
error: context "cilium-104147" does not exist

>>> k8s: netcat logs:
error: context "cilium-104147" does not exist

>>> k8s: describe coredns deployment:
error: context "cilium-104147" does not exist

>>> k8s: describe coredns pods:
error: context "cilium-104147" does not exist

>>> k8s: coredns logs:
error: context "cilium-104147" does not exist

>>> k8s: describe api server pod(s):
error: context "cilium-104147" does not exist

>>> k8s: api server logs:
error: context "cilium-104147" does not exist

>>> host: /etc/cni:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: ip a s:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: ip r s:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: iptables-save:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: iptables table nat:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> k8s: describe cilium daemon set:
Error in configuration: context was not found for specified context: cilium-104147

>>> k8s: describe cilium daemon set pod(s):
Error in configuration: context was not found for specified context: cilium-104147

>>> k8s: cilium daemon set container(s) logs (current):
error: context "cilium-104147" does not exist

>>> k8s: cilium daemon set container(s) logs (previous):
error: context "cilium-104147" does not exist

>>> k8s: describe cilium deployment:
Error in configuration: context was not found for specified context: cilium-104147

>>> k8s: describe cilium deployment pod(s):
Error in configuration: context was not found for specified context: cilium-104147

>>> k8s: cilium deployment container(s) logs (current):
error: context "cilium-104147" does not exist

>>> k8s: cilium deployment container(s) logs (previous):
error: context "cilium-104147" does not exist

>>> k8s: describe kube-proxy daemon set:
error: context "cilium-104147" does not exist

>>> k8s: describe kube-proxy pod(s):
error: context "cilium-104147" does not exist

>>> k8s: kube-proxy logs:
error: context "cilium-104147" does not exist

>>> host: kubelet daemon status:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: kubelet daemon config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> k8s: kubelet logs:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/kubernetes/kubelet.conf:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /var/lib/kubelet/config.yaml:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> k8s: kubectl config:
apiVersion: v1
clusters:
- cluster:
    certificate-authority: /home/jenkins/minikube-integration/18859-1190282/.minikube/ca.crt
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: cluster_info
    server: https://192.168.76.2:8443
  name: pause-736265
contexts:
- context:
    cluster: pause-736265
    extensions:
    - extension:
        last-update: Thu, 04 Jul 2024 02:17:59 UTC
        provider: minikube.sigs.k8s.io
        version: v1.33.1
      name: context_info
    namespace: default
    user: pause-736265
  name: pause-736265
current-context: pause-736265
kind: Config
preferences: {}
users:
- name: pause-736265
  user:
    client-certificate: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.crt
    client-key: /home/jenkins/minikube-integration/18859-1190282/.minikube/profiles/pause-736265/client.key

>>> k8s: cms:
Error in configuration: context was not found for specified context: cilium-104147

>>> host: docker daemon status:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: docker daemon config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/docker/daemon.json:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: docker system info:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: cri-docker daemon status:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: cri-docker daemon config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/systemd/system/cri-docker.service.d/10-cni.conf:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /usr/lib/systemd/system/cri-docker.service:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: cri-dockerd version:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: containerd daemon status:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: containerd daemon config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /lib/systemd/system/containerd.service:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/containerd/config.toml:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: containerd config dump:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: crio daemon status:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: crio daemon config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: /etc/crio:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"

>>> host: crio config:
* Profile "cilium-104147" not found. Run "minikube profile list" to view all profiles.
To start a cluster, run: "minikube start -p cilium-104147"
----------------------- debugLogs end: cilium-104147 [took: 3.769671746s] --------------------------------
helpers_test.go:175: Cleaning up "cilium-104147" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cilium-104147
--- SKIP: TestNetworkPlugins/group/cilium (3.92s)

TestStartStop/group/disable-driver-mounts (0.17s)

=== RUN   TestStartStop/group/disable-driver-mounts
=== PAUSE TestStartStop/group/disable-driver-mounts

=== CONT  TestStartStop/group/disable-driver-mounts
start_stop_delete_test.go:103: skipping TestStartStop/group/disable-driver-mounts - only runs on virtualbox
helpers_test.go:175: Cleaning up "disable-driver-mounts-408119" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p disable-driver-mounts-408119
--- SKIP: TestStartStop/group/disable-driver-mounts (0.17s)
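
Note that the profile cleanup above runs even though the test skips. A hedged sketch of that ordering (the binary path and profile name are taken from the log; the helper itself is assumed, not minikube's actual code):

package sketch

import (
    "os/exec"
    "testing"
)

func TestDisableDriverMountsSketch(t *testing.T) {
    profile := "disable-driver-mounts-408119"
    // t.Cleanup callbacks still run after t.Skip, which is why this report
    // can show a "delete -p" inside a SKIP entry.
    t.Cleanup(func() {
        out, err := exec.Command("out/minikube-linux-arm64", "delete", "-p", profile).CombinedOutput()
        if err != nil {
            t.Logf("cleanup failed: %v\n%s", err, out)
        }
    })
    t.Skip("skipping TestStartStop/group/disable-driver-mounts - only runs on virtualbox")
}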