=== RUN TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress
=== CONT TestAddons/parallel/Ingress
addons_test.go:207: (dbg) Run: kubectl --context addons-642352 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:207: (dbg) Done: kubectl --context addons-642352 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s: (9.498638458s)
addons_test.go:232: (dbg) Run: kubectl --context addons-642352 replace --force -f testdata/nginx-ingress-v1.yaml
addons_test.go:245: (dbg) Run: kubectl --context addons-642352 replace --force -f testdata/nginx-pod-svc.yaml
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: waiting 8m0s for pods matching "run=nginx" in namespace "default" ...
helpers_test.go:344: "nginx" [58372957-d99b-4103-b2c6-ed71643619c1] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx" [58372957-d99b-4103-b2c6-ed71643619c1] Running
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: run=nginx healthy within 11.004003718s
addons_test.go:262: (dbg) Run: out/minikube-linux-amd64 -p addons-642352 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:262: (dbg) Non-zero exit: out/minikube-linux-amd64 -p addons-642352 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'": exit status 1 (2m10.914983254s)
** stderr **
ssh: Process exited with status 28
** /stderr **
addons_test.go:278: failed to get expected response from http://127.0.0.1/ within minikube: exit status 1
addons_test.go:286: (dbg) Run: kubectl --context addons-642352 replace --force -f testdata/ingress-dns-example-v1.yaml
addons_test.go:291: (dbg) Run: out/minikube-linux-amd64 -p addons-642352 ip
addons_test.go:297: (dbg) Run: nslookup hello-john.test 192.168.49.2
addons_test.go:306: (dbg) Run: out/minikube-linux-amd64 -p addons-642352 addons disable ingress-dns --alsologtostderr -v=1
addons_test.go:306: (dbg) Done: out/minikube-linux-amd64 -p addons-642352 addons disable ingress-dns --alsologtostderr -v=1: (1.064443062s)
addons_test.go:311: (dbg) Run: out/minikube-linux-amd64 -p addons-642352 addons disable ingress --alsologtostderr -v=1
addons_test.go:311: (dbg) Done: out/minikube-linux-amd64 -p addons-642352 addons disable ingress --alsologtostderr -v=1: (7.686218983s)
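Note on the failure above: the only failing step is the ssh curl, and an exit status of 28 from the remote process matches curl's "operation timed out" exit code, i.e. the request to the ingress controller on 127.0.0.1 inside the node never completed within the test's window. A minimal manual re-check against the same profile (a sketch, assuming the addons-642352 profile from this run is still up and the testdata manifests are still applied) could be:

  # confirm the ingress-nginx controller and the nginx test pod are Ready
  kubectl --context addons-642352 -n ingress-nginx get pods -l app.kubernetes.io/component=controller
  kubectl --context addons-642352 get pods -l run=nginx

  # confirm the Ingress object created from testdata/nginx-ingress-v1.yaml was admitted
  kubectl --context addons-642352 get ingress

  # repeat the curl the test performs, with an explicit client-side timeout instead of the 2m+ hang
  out/minikube-linux-amd64 -p addons-642352 ssh "curl -s --max-time 10 -H 'Host: nginx.example.com' http://127.0.0.1/"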
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======> post-mortem[TestAddons/parallel/Ingress]: docker inspect <======
helpers_test.go:231: (dbg) Run: docker inspect addons-642352
helpers_test.go:235: (dbg) docker inspect addons-642352:
-- stdout --
[
{
"Id": "ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0",
"Created": "2024-02-01T09:09:12.895842154Z",
"Path": "/usr/local/bin/entrypoint",
"Args": [
"/sbin/init"
],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 961921,
"ExitCode": 0,
"Error": "",
"StartedAt": "2024-02-01T09:09:13.201657962Z",
"FinishedAt": "0001-01-01T00:00:00Z"
},
"Image": "sha256:9941de2e064a4a6a7155bfc66cedd2854b8c725b77bb8d4eaf81bef39f951dd7",
"ResolvConfPath": "/var/lib/docker/containers/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0/hostname",
"HostsPath": "/var/lib/docker/containers/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0/hosts",
"LogPath": "/var/lib/docker/containers/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0-json.log",
"Name": "/addons-642352",
"RestartCount": 0,
"Driver": "overlay2",
"Platform": "linux",
"MountLabel": "",
"ProcessLabel": "",
"AppArmorProfile": "unconfined",
"ExecIDs": null,
"HostConfig": {
"Binds": [
"/lib/modules:/lib/modules:ro",
"addons-642352:/var"
],
"ContainerIDFile": "",
"LogConfig": {
"Type": "json-file",
"Config": {
"max-size": "100m"
}
},
"NetworkMode": "addons-642352",
"PortBindings": {
"22/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"2376/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"32443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"5000/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
],
"8443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": ""
}
]
},
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"AutoRemove": false,
"VolumeDriver": "",
"VolumesFrom": null,
"ConsoleSize": [
0,
0
],
"CapAdd": null,
"CapDrop": null,
"CgroupnsMode": "host",
"Dns": [],
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": null,
"GroupAdd": null,
"IpcMode": "private",
"Cgroup": "",
"Links": null,
"OomScoreAdj": 0,
"PidMode": "",
"Privileged": true,
"PublishAllPorts": false,
"ReadonlyRootfs": false,
"SecurityOpt": [
"seccomp=unconfined",
"apparmor=unconfined",
"label=disable"
],
"Tmpfs": {
"/run": "",
"/tmp": ""
},
"UTSMode": "",
"UsernsMode": "",
"ShmSize": 67108864,
"Runtime": "runc",
"Isolation": "",
"CpuShares": 0,
"Memory": 4194304000,
"NanoCpus": 2000000000,
"CgroupParent": "",
"BlkioWeight": 0,
"BlkioWeightDevice": [],
"BlkioDeviceReadBps": [],
"BlkioDeviceWriteBps": [],
"BlkioDeviceReadIOps": [],
"BlkioDeviceWriteIOps": [],
"CpuPeriod": 0,
"CpuQuota": 0,
"CpuRealtimePeriod": 0,
"CpuRealtimeRuntime": 0,
"CpusetCpus": "",
"CpusetMems": "",
"Devices": [],
"DeviceCgroupRules": null,
"DeviceRequests": null,
"MemoryReservation": 0,
"MemorySwap": 8388608000,
"MemorySwappiness": null,
"OomKillDisable": false,
"PidsLimit": null,
"Ulimits": [],
"CpuCount": 0,
"CpuPercent": 0,
"IOMaximumIOps": 0,
"IOMaximumBandwidth": 0,
"MaskedPaths": null,
"ReadonlyPaths": null
},
"GraphDriver": {
"Data": {
"LowerDir": "/var/lib/docker/overlay2/08232dba750632eb38bb0b2fe50754717cd0b14f441b235797b8891b71e7a73d-init/diff:/var/lib/docker/overlay2/118cd56b7cf3f8f98e5d06fe937de6e8b842264a59a088dbb73626cf7e05fed3/diff",
"MergedDir": "/var/lib/docker/overlay2/08232dba750632eb38bb0b2fe50754717cd0b14f441b235797b8891b71e7a73d/merged",
"UpperDir": "/var/lib/docker/overlay2/08232dba750632eb38bb0b2fe50754717cd0b14f441b235797b8891b71e7a73d/diff",
"WorkDir": "/var/lib/docker/overlay2/08232dba750632eb38bb0b2fe50754717cd0b14f441b235797b8891b71e7a73d/work"
},
"Name": "overlay2"
},
"Mounts": [
{
"Type": "bind",
"Source": "/lib/modules",
"Destination": "/lib/modules",
"Mode": "ro",
"RW": false,
"Propagation": "rprivate"
},
{
"Type": "volume",
"Name": "addons-642352",
"Source": "/var/lib/docker/volumes/addons-642352/_data",
"Destination": "/var",
"Driver": "local",
"Mode": "z",
"RW": true,
"Propagation": ""
}
],
"Config": {
"Hostname": "addons-642352",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"ExposedPorts": {
"22/tcp": {},
"2376/tcp": {},
"32443/tcp": {},
"5000/tcp": {},
"8443/tcp": {}
},
"Tty": true,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Cmd": null,
"Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0",
"Volumes": null,
"WorkingDir": "/",
"Entrypoint": [
"/usr/local/bin/entrypoint",
"/sbin/init"
],
"MacAddress": "02:42:c0:a8:31:02",
"OnBuild": null,
"Labels": {
"created_by.minikube.sigs.k8s.io": "true",
"mode.minikube.sigs.k8s.io": "addons-642352",
"name.minikube.sigs.k8s.io": "addons-642352",
"role.minikube.sigs.k8s.io": ""
},
"StopSignal": "SIGRTMIN+3"
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "975dad5884d284464d7166c56e88780b141347c6563aa24ce5ca668f94dfc9b1",
"SandboxKey": "/var/run/docker/netns/975dad5884d2",
"Ports": {
"22/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "34031"
}
],
"2376/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "34030"
}
],
"32443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "34027"
}
],
"5000/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "34029"
}
],
"8443/tcp": [
{
"HostIp": "127.0.0.1",
"HostPort": "34028"
}
]
},
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "",
"Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "",
"IPPrefixLen": 0,
"IPv6Gateway": "",
"MacAddress": "",
"Networks": {
"addons-642352": {
"IPAMConfig": {
"IPv4Address": "192.168.49.2"
},
"Links": null,
"Aliases": [
"ba9aca09f642",
"addons-642352"
],
"MacAddress": "02:42:c0:a8:31:02",
"NetworkID": "3fb4120869864e9d72f1805b9a71b8e8b6af9ce94c7f797f8fe13608be3baf92",
"EndpointID": "a9ae79bda69855100bb8f30040eeed282c5decccef932715a0120a4c8769d354",
"Gateway": "192.168.49.1",
"IPAddress": "192.168.49.2",
"IPPrefixLen": 24,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"DriverOpts": null,
"DNSNames": [
"addons-642352",
"ba9aca09f642"
]
}
}
}
}
]
-- /stdout --
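The "Ports" section of the inspect output above shows how the harness reaches the node: container ports 22 (ssh), 2376, 5000, 8443 (API server) and 32443 are published on 127.0.0.1 with dynamically chosen host ports (34027-34031 in this run). A single mapping can be read back with the same Go template the harness uses later in this log (a sketch, assuming the container is still running):

  # host port backing the container's ssh port (22/tcp); prints 34031 for this run
  docker container inspect -f '{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}' addons-642352

  # host port backing the Kubernetes API server port (8443/tcp); prints 34028 for this run
  docker container inspect -f '{{(index (index .NetworkSettings.Ports "8443/tcp") 0).HostPort}}' addons-642352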
helpers_test.go:239: (dbg) Run: out/minikube-linux-amd64 status --format={{.Host}} -p addons-642352 -n addons-642352
helpers_test.go:244: <<< TestAddons/parallel/Ingress FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======> post-mortem[TestAddons/parallel/Ingress]: minikube logs <======
helpers_test.go:247: (dbg) Run: out/minikube-linux-amd64 -p addons-642352 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-amd64 -p addons-642352 logs -n 25: (1.282192008s)
helpers_test.go:252: TestAddons/parallel/Ingress logs:
-- stdout --
==> Audit <==
|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
| Command | Args | Profile | User | Version | Start Time | End Time |
|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
| delete | -p download-only-625877 | download-only-625877 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | 01 Feb 24 09:08 UTC |
| delete | -p download-only-057828 | download-only-057828 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | 01 Feb 24 09:08 UTC |
| start | --download-only -p | download-docker-452662 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | |
| | download-docker-452662 | | | | | |
| | --alsologtostderr | | | | | |
| | --driver=docker | | | | | |
| | --container-runtime=crio | | | | | |
| delete | -p download-docker-452662 | download-docker-452662 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | 01 Feb 24 09:08 UTC |
| start | --download-only -p | binary-mirror-807134 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | |
| | binary-mirror-807134 | | | | | |
| | --alsologtostderr | | | | | |
| | --binary-mirror | | | | | |
| | http://127.0.0.1:34923 | | | | | |
| | --driver=docker | | | | | |
| | --container-runtime=crio | | | | | |
| delete | -p binary-mirror-807134 | binary-mirror-807134 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | 01 Feb 24 09:08 UTC |
| addons | enable dashboard -p | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | |
| | addons-642352 | | | | | |
| addons | disable dashboard -p | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | |
| | addons-642352 | | | | | |
| start | -p addons-642352 --wait=true | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:08 UTC | 01 Feb 24 09:11 UTC |
| | --memory=4000 --alsologtostderr | | | | | |
| | --addons=registry | | | | | |
| | --addons=metrics-server | | | | | |
| | --addons=volumesnapshots | | | | | |
| | --addons=csi-hostpath-driver | | | | | |
| | --addons=gcp-auth | | | | | |
| | --addons=cloud-spanner | | | | | |
| | --addons=inspektor-gadget | | | | | |
| | --addons=storage-provisioner-rancher | | | | | |
| | --addons=nvidia-device-plugin | | | | | |
| | --addons=yakd --driver=docker | | | | | |
| | --container-runtime=crio | | | | | |
| | --addons=ingress | | | | | |
| | --addons=ingress-dns | | | | | |
| | --addons=helm-tiller | | | | | |
| addons | disable inspektor-gadget -p | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | addons-642352 | | | | | |
| ip | addons-642352 ip | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| addons | addons-642352 addons disable | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | registry --alsologtostderr | | | | | |
| | -v=1 | | | | | |
| ssh | addons-642352 ssh curl -s | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | |
| | http://127.0.0.1/ -H 'Host: | | | | | |
| | nginx.example.com' | | | | | |
| addons | disable nvidia-device-plugin | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | -p addons-642352 | | | | | |
| addons | disable cloud-spanner -p | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | addons-642352 | | | | | |
| addons | enable headlamp | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | -p addons-642352 | | | | | |
| | --alsologtostderr -v=1 | | | | | |
| ssh | addons-642352 ssh cat | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | /opt/local-path-provisioner/pvc-5a4495e1-0e0a-490e-9234-87dcffee5021_default_test-pvc/file1 | | | | | |
| addons | addons-642352 addons disable | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:12 UTC |
| | storage-provisioner-rancher | | | | | |
| | --alsologtostderr -v=1 | | | | | |
| addons | addons-642352 addons disable | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:11 UTC | 01 Feb 24 09:11 UTC |
| | helm-tiller --alsologtostderr | | | | | |
| | -v=1 | | | | | |
| addons | addons-642352 addons | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:12 UTC | 01 Feb 24 09:12 UTC |
| | disable metrics-server | | | | | |
| | --alsologtostderr -v=1 | | | | | |
| addons | addons-642352 addons | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:12 UTC | 01 Feb 24 09:12 UTC |
| | disable csi-hostpath-driver | | | | | |
| | --alsologtostderr -v=1 | | | | | |
| addons | addons-642352 addons | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:12 UTC | 01 Feb 24 09:12 UTC |
| | disable volumesnapshots | | | | | |
| | --alsologtostderr -v=1 | | | | | |
| ip | addons-642352 ip | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:13 UTC | 01 Feb 24 09:13 UTC |
| addons | addons-642352 addons disable | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:13 UTC | 01 Feb 24 09:13 UTC |
| | ingress-dns --alsologtostderr | | | | | |
| | -v=1 | | | | | |
| addons | addons-642352 addons disable | addons-642352 | jenkins | v1.32.0 | 01 Feb 24 09:13 UTC | 01 Feb 24 09:13 UTC |
| | ingress --alsologtostderr -v=1 | | | | | |
|---------|---------------------------------------------------------------------------------------------|------------------------|---------|---------|---------------------|---------------------|
==> Last Start <==
Log file created at: 2024/02/01 09:08:51
Running on machine: ubuntu-20-agent
Binary: Built with gc go1.21.6 for linux/amd64
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0201 09:08:51.669670 961265 out.go:296] Setting OutFile to fd 1 ...
I0201 09:08:51.669954 961265 out.go:343] TERM=,COLORTERM=, which probably does not support color
I0201 09:08:51.669964 961265 out.go:309] Setting ErrFile to fd 2...
I0201 09:08:51.669969 961265 out.go:343] TERM=,COLORTERM=, which probably does not support color
I0201 09:08:51.670158 961265 root.go:338] Updating PATH: /home/jenkins/minikube-integration/18051-952908/.minikube/bin
I0201 09:08:51.670877 961265 out.go:303] Setting JSON to false
I0201 09:08:51.671863 961265 start.go:128] hostinfo: {"hostname":"ubuntu-20-agent","uptime":57079,"bootTime":1706721453,"procs":172,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1049-gcp","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"591c9f12-2938-3743-e2bf-c56a050d43d1"}
I0201 09:08:51.671947 961265 start.go:138] virtualization: kvm guest
I0201 09:08:51.674252 961265 out.go:177] * [addons-642352] minikube v1.32.0 on Ubuntu 20.04 (kvm/amd64)
I0201 09:08:51.675817 961265 out.go:177] - MINIKUBE_LOCATION=18051
I0201 09:08:51.675803 961265 notify.go:220] Checking for updates...
I0201 09:08:51.677509 961265 out.go:177] - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
I0201 09:08:51.679149 961265 out.go:177] - KUBECONFIG=/home/jenkins/minikube-integration/18051-952908/kubeconfig
I0201 09:08:51.680682 961265 out.go:177] - MINIKUBE_HOME=/home/jenkins/minikube-integration/18051-952908/.minikube
I0201 09:08:51.682026 961265 out.go:177] - MINIKUBE_BIN=out/minikube-linux-amd64
I0201 09:08:51.683487 961265 out.go:177] - MINIKUBE_FORCE_SYSTEMD=
I0201 09:08:51.685035 961265 driver.go:392] Setting default libvirt URI to qemu:///system
I0201 09:08:51.706959 961265 docker.go:122] docker version: linux-25.0.2:Docker Engine - Community
I0201 09:08:51.707102 961265 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0201 09:08:51.762926 961265 info.go:266] docker info: {ID:TS6T:UINC:MIYS:RZPA:KS6T:4JQK:7JHN:D6RA:LDP2:MHAE:G32M:C5NQ Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:31 OomKillDisable:true NGoroutines:51 SystemTime:2024-02-01 09:08:51.75007396 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1049-gcp OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:x86_
64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:33648050176 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ubuntu-20-agent Labels:[] ExperimentalBuild:false ServerVersion:25.0.2 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae07eda36dd25f8a1b98dfbf587313b99c0190bb Expected:ae07eda36dd25f8a1b98dfbf587313b99c0190bb} RuncCommit:{ID:v1.1.12-0-g51d5e94 Expected:v1.1.12-0-g51d5e94} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors
:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.12.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.24.5] map[Name:scan Path:/usr/libexec/docker/cli-plugins/docker-scan SchemaVersion:0.1.0 ShortDescription:Docker Scan Vendor:Docker Inc. Version:v0.23.0]] Warnings:<nil>}}
I0201 09:08:51.763054 961265 docker.go:295] overlay module found
I0201 09:08:51.766090 961265 out.go:177] * Using the docker driver based on user configuration
I0201 09:08:51.768467 961265 start.go:298] selected driver: docker
I0201 09:08:51.768497 961265 start.go:902] validating driver "docker" against <nil>
I0201 09:08:51.768513 961265 start.go:913] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
I0201 09:08:51.769404 961265 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0201 09:08:51.822224 961265 info.go:266] docker info: {ID:TS6T:UINC:MIYS:RZPA:KS6T:4JQK:7JHN:D6RA:LDP2:MHAE:G32M:C5NQ Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:31 OomKillDisable:true NGoroutines:51 SystemTime:2024-02-01 09:08:51.812496942 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1049-gcp OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:x86
_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:33648050176 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ubuntu-20-agent Labels:[] ExperimentalBuild:false ServerVersion:25.0.2 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae07eda36dd25f8a1b98dfbf587313b99c0190bb Expected:ae07eda36dd25f8a1b98dfbf587313b99c0190bb} RuncCommit:{ID:v1.1.12-0-g51d5e94 Expected:v1.1.12-0-g51d5e94} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.12.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.24.5] map[Name:scan Path:/usr/libexec/docker/cli-plugins/docker-scan SchemaVersion:0.1.0 ShortDescription:Docker Scan Vendor:Docker Inc. Version:v0.23.0]] Warnings:<nil>}}
I0201 09:08:51.822412 961265 start_flags.go:307] no existing cluster config was found, will generate one from the flags
I0201 09:08:51.822638 961265 start_flags.go:927] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
I0201 09:08:51.824231 961265 out.go:177] * Using Docker driver with root privileges
I0201 09:08:51.825641 961265 cni.go:84] Creating CNI manager for ""
I0201 09:08:51.825663 961265 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
I0201 09:08:51.825675 961265 start_flags.go:316] Found "CNI" CNI - setting NetworkPlugin=cni
I0201 09:08:51.825691 961265 start_flags.go:321] config:
{Name:addons-642352 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 Memory:4000 CPUs:2 DiskSize:20000 VMDriver: Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:0 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.28.4 ClusterName:addons-642352 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRI
Socket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs:}
I0201 09:08:51.827405 961265 out.go:177] * Starting control plane node addons-642352 in cluster addons-642352
I0201 09:08:51.828745 961265 cache.go:121] Beginning downloading kic base image for docker with crio
I0201 09:08:51.830158 961265 out.go:177] * Pulling base image v0.0.42-1704759386-17866 ...
I0201 09:08:51.831552 961265 preload.go:132] Checking if preload exists for k8s version v1.28.4 and runtime crio
I0201 09:08:51.831605 961265 preload.go:148] Found local preload: /home/jenkins/minikube-integration/18051-952908/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.28.4-cri-o-overlay-amd64.tar.lz4
I0201 09:08:51.831620 961265 cache.go:56] Caching tarball of preloaded images
I0201 09:08:51.831630 961265 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 in local docker daemon
I0201 09:08:51.831734 961265 preload.go:174] Found /home/jenkins/minikube-integration/18051-952908/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.28.4-cri-o-overlay-amd64.tar.lz4 in cache, skipping download
I0201 09:08:51.831756 961265 cache.go:59] Finished verifying existence of preloaded tar for v1.28.4 on crio
I0201 09:08:51.832116 961265 profile.go:148] Saving config to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/config.json ...
I0201 09:08:51.832147 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/config.json: {Name:mk506e4fa5282228eed2a690e4dcba8c71c2e923 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:08:51.847740 961265 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 to local cache
I0201 09:08:51.847899 961265 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 in local cache directory
I0201 09:08:51.847921 961265 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 in local cache directory, skipping pull
I0201 09:08:51.847928 961265 image.go:105] gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 exists in cache, skipping pull
I0201 09:08:51.847942 961265 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 as a tarball
I0201 09:08:51.847954 961265 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 from local cache
I0201 09:09:03.392056 961265 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 from cached tarball
I0201 09:09:03.392097 961265 cache.go:194] Successfully downloaded all kic artifacts
I0201 09:09:03.392145 961265 start.go:365] acquiring machines lock for addons-642352: {Name:mk0329e506b4aa0b70097346accee9e5da4e37de Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I0201 09:09:03.392253 961265 start.go:369] acquired machines lock for "addons-642352" in 85.167µs
I0201 09:09:03.392276 961265 start.go:93] Provisioning new machine with config: &{Name:addons-642352 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 Memory:4000 CPUs:2 DiskSize:20000 VMDriver: Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:0 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.28.4 ClusterName:addons-642352 Namespace:default APIServerName:minikubeCA A
PIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.28.4 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false Disabl
eMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs:} &{Name: IP: Port:8443 KubernetesVersion:v1.28.4 ContainerRuntime:crio ControlPlane:true Worker:true}
I0201 09:09:03.392394 961265 start.go:125] createHost starting for "" (driver="docker")
I0201 09:09:03.394535 961265 out.go:204] * Creating docker container (CPUs=2, Memory=4000MB) ...
I0201 09:09:03.394797 961265 start.go:159] libmachine.API.Create for "addons-642352" (driver="docker")
I0201 09:09:03.394825 961265 client.go:168] LocalClient.Create starting
I0201 09:09:03.394986 961265 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem
I0201 09:09:03.568980 961265 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/cert.pem
I0201 09:09:03.674325 961265 cli_runner.go:164] Run: docker network inspect addons-642352 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
W0201 09:09:03.690523 961265 cli_runner.go:211] docker network inspect addons-642352 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
I0201 09:09:03.690604 961265 network_create.go:281] running [docker network inspect addons-642352] to gather additional debugging logs...
I0201 09:09:03.690644 961265 cli_runner.go:164] Run: docker network inspect addons-642352
W0201 09:09:03.706562 961265 cli_runner.go:211] docker network inspect addons-642352 returned with exit code 1
I0201 09:09:03.706601 961265 network_create.go:284] error running [docker network inspect addons-642352]: docker network inspect addons-642352: exit status 1
stdout:
[]
stderr:
Error response from daemon: network addons-642352 not found
I0201 09:09:03.706614 961265 network_create.go:286] output of [docker network inspect addons-642352]: -- stdout --
[]
-- /stdout --
** stderr **
Error response from daemon: network addons-642352 not found
** /stderr **
I0201 09:09:03.706723 961265 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I0201 09:09:03.724897 961265 network.go:209] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0xc00206ec80}
I0201 09:09:03.724955 961265 network_create.go:124] attempt to create docker network addons-642352 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
I0201 09:09:03.725096 961265 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-642352 addons-642352
I0201 09:09:03.785501 961265 network_create.go:108] docker network addons-642352 192.168.49.0/24 created
I0201 09:09:03.785534 961265 kic.go:121] calculated static IP "192.168.49.2" for the "addons-642352" container
I0201 09:09:03.785604 961265 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
I0201 09:09:03.801814 961265 cli_runner.go:164] Run: docker volume create addons-642352 --label name.minikube.sigs.k8s.io=addons-642352 --label created_by.minikube.sigs.k8s.io=true
I0201 09:09:03.819955 961265 oci.go:103] Successfully created a docker volume addons-642352
I0201 09:09:03.820041 961265 cli_runner.go:164] Run: docker run --rm --name addons-642352-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-642352 --entrypoint /usr/bin/test -v addons-642352:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 -d /var/lib
I0201 09:09:07.606044 961265 cli_runner.go:217] Completed: docker run --rm --name addons-642352-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-642352 --entrypoint /usr/bin/test -v addons-642352:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 -d /var/lib: (3.785961607s)
I0201 09:09:07.606078 961265 oci.go:107] Successfully prepared a docker volume addons-642352
I0201 09:09:07.606117 961265 preload.go:132] Checking if preload exists for k8s version v1.28.4 and runtime crio
I0201 09:09:07.606145 961265 kic.go:194] Starting extracting preloaded images to volume ...
I0201 09:09:07.606211 961265 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18051-952908/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.28.4-cri-o-overlay-amd64.tar.lz4:/preloaded.tar:ro -v addons-642352:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 -I lz4 -xf /preloaded.tar -C /extractDir
I0201 09:09:12.826375 961265 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/18051-952908/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.28.4-cri-o-overlay-amd64.tar.lz4:/preloaded.tar:ro -v addons-642352:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 -I lz4 -xf /preloaded.tar -C /extractDir: (5.220115303s)
I0201 09:09:12.826432 961265 kic.go:203] duration metric: took 5.220283 seconds to extract preloaded images to volume
W0201 09:09:12.826595 961265 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
I0201 09:09:12.826719 961265 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
I0201 09:09:12.878126 961265 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-642352 --name addons-642352 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-642352 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-642352 --network addons-642352 --ip 192.168.49.2 --volume addons-642352:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0
I0201 09:09:13.209618 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Running}}
I0201 09:09:13.227279 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:13.245549 961265 cli_runner.go:164] Run: docker exec addons-642352 stat /var/lib/dpkg/alternatives/iptables
I0201 09:09:13.289307 961265 oci.go:144] the created container "addons-642352" has a running status.
I0201 09:09:13.289339 961265 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa...
I0201 09:09:13.403981 961265 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
I0201 09:09:13.425549 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:13.445754 961265 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
I0201 09:09:13.445780 961265 kic_runner.go:114] Args: [docker exec --privileged addons-642352 chown docker:docker /home/docker/.ssh/authorized_keys]
I0201 09:09:13.490199 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:13.510118 961265 machine.go:88] provisioning docker machine ...
I0201 09:09:13.510176 961265 ubuntu.go:169] provisioning hostname "addons-642352"
I0201 09:09:13.510251 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:13.527852 961265 main.go:141] libmachine: Using SSH client type: native
I0201 09:09:13.528230 961265 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x80a920] 0x80d600 <nil> [] 0s} 127.0.0.1 34031 <nil> <nil>}
I0201 09:09:13.528253 961265 main.go:141] libmachine: About to run SSH command:
sudo hostname addons-642352 && echo "addons-642352" | sudo tee /etc/hostname
I0201 09:09:13.528867 961265 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:50534->127.0.0.1:34031: read: connection reset by peer
I0201 09:09:16.678729 961265 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-642352
I0201 09:09:16.678830 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:16.695704 961265 main.go:141] libmachine: Using SSH client type: native
I0201 09:09:16.696081 961265 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x80a920] 0x80d600 <nil> [] 0s} 127.0.0.1 34031 <nil> <nil>}
I0201 09:09:16.696101 961265 main.go:141] libmachine: About to run SSH command:
if ! grep -xq '.*\saddons-642352' /etc/hosts; then
if grep -xq '127.0.1.1\s.*' /etc/hosts; then
sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-642352/g' /etc/hosts;
else
echo '127.0.1.1 addons-642352' | sudo tee -a /etc/hosts;
fi
fi
I0201 09:09:16.830889 961265 main.go:141] libmachine: SSH cmd err, output: <nil>:
I0201 09:09:16.830937 961265 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/18051-952908/.minikube CaCertPath:/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/18051-952908/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/18051-952908/.minikube}
I0201 09:09:16.830962 961265 ubuntu.go:177] setting up certificates
I0201 09:09:16.830973 961265 provision.go:83] configureAuth start
I0201 09:09:16.831031 961265 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-642352
I0201 09:09:16.847953 961265 provision.go:138] copyHostCerts
I0201 09:09:16.848027 961265 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/18051-952908/.minikube/ca.pem (1078 bytes)
I0201 09:09:16.848147 961265 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/18051-952908/.minikube/cert.pem (1123 bytes)
I0201 09:09:16.848206 961265 exec_runner.go:151] cp: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/18051-952908/.minikube/key.pem (1675 bytes)
I0201 09:09:16.848255 961265 provision.go:112] generating server cert: /home/jenkins/minikube-integration/18051-952908/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca-key.pem org=jenkins.addons-642352 san=[192.168.49.2 127.0.0.1 localhost 127.0.0.1 minikube addons-642352]
I0201 09:09:16.935014 961265 provision.go:172] copyRemoteCerts
I0201 09:09:16.935075 961265 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
I0201 09:09:16.935115 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:16.951703 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:17.046857 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
I0201 09:09:17.068098 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
I0201 09:09:17.088978 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
I0201 09:09:17.110156 961265 provision.go:86] duration metric: configureAuth took 279.168949ms
I0201 09:09:17.110183 961265 ubuntu.go:193] setting minikube options for container-runtime
I0201 09:09:17.110386 961265 config.go:182] Loaded profile config "addons-642352": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.28.4
I0201 09:09:17.110543 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:17.126712 961265 main.go:141] libmachine: Using SSH client type: native
I0201 09:09:17.127039 961265 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x80a920] 0x80d600 <nil> [] 0s} 127.0.0.1 34031 <nil> <nil>}
I0201 09:09:17.127057 961265 main.go:141] libmachine: About to run SSH command:
sudo mkdir -p /etc/sysconfig && printf %!s(MISSING) "
CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
" | sudo tee /etc/sysconfig/crio.minikube && sudo systemctl restart crio
I0201 09:09:17.347745 961265 main.go:141] libmachine: SSH cmd err, output: <nil>:
CRIO_MINIKUBE_OPTIONS='--insecure-registry 10.96.0.0/12 '
I0201 09:09:17.347773 961265 machine.go:91] provisioned docker machine in 3.837629984s
I0201 09:09:17.347786 961265 client.go:171] LocalClient.Create took 13.95295104s
I0201 09:09:17.347810 961265 start.go:167] duration metric: libmachine.API.Create for "addons-642352" took 13.953013348s
I0201 09:09:17.347824 961265 start.go:300] post-start starting for "addons-642352" (driver="docker")
I0201 09:09:17.347838 961265 start.go:329] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
I0201 09:09:17.347894 961265 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
I0201 09:09:17.347941 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:17.364950 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:17.459781 961265 ssh_runner.go:195] Run: cat /etc/os-release
I0201 09:09:17.463105 961265 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
I0201 09:09:17.463152 961265 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
I0201 09:09:17.463167 961265 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
I0201 09:09:17.463177 961265 info.go:137] Remote host: Ubuntu 22.04.3 LTS
I0201 09:09:17.463191 961265 filesync.go:126] Scanning /home/jenkins/minikube-integration/18051-952908/.minikube/addons for local assets ...
I0201 09:09:17.463259 961265 filesync.go:126] Scanning /home/jenkins/minikube-integration/18051-952908/.minikube/files for local assets ...
I0201 09:09:17.463291 961265 start.go:303] post-start completed in 115.458192ms
I0201 09:09:17.463575 961265 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-642352
I0201 09:09:17.480363 961265 profile.go:148] Saving config to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/config.json ...
I0201 09:09:17.480640 961265 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
I0201 09:09:17.480695 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:17.497425 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:17.587367 961265 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
I0201 09:09:17.591772 961265 start.go:128] duration metric: createHost completed in 14.199360487s
I0201 09:09:17.591799 961265 start.go:83] releasing machines lock for "addons-642352", held for 14.199536094s
I0201 09:09:17.591869 961265 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-642352
I0201 09:09:17.608601 961265 ssh_runner.go:195] Run: cat /version.json
I0201 09:09:17.608646 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:17.608670 961265 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
I0201 09:09:17.608764 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:17.627687 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:17.627789 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:17.813483 961265 ssh_runner.go:195] Run: systemctl --version
I0201 09:09:17.817796 961265 ssh_runner.go:195] Run: sudo sh -c "podman version >/dev/null"
I0201 09:09:17.955033 961265 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
I0201 09:09:17.960639 961265 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "sudo mv {} {}.mk_disabled" ;
I0201 09:09:17.979028 961265 cni.go:221] loopback cni configuration disabled: "/etc/cni/net.d/*loopback.conf*" found
I0201 09:09:17.979119 961265 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%!p(MISSING), " -exec sh -c "sudo mv {} {}.mk_disabled" ;
I0201 09:09:18.005958 961265 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
I0201 09:09:18.005983 961265 start.go:475] detecting cgroup driver to use...
I0201 09:09:18.006016 961265 detect.go:196] detected "cgroupfs" cgroup driver on host os
I0201 09:09:18.006058 961265 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
I0201 09:09:18.020498 961265 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
I0201 09:09:18.031323 961265 docker.go:217] disabling cri-docker service (if available) ...
I0201 09:09:18.031403 961265 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
I0201 09:09:18.044317 961265 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
I0201 09:09:18.057191 961265 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
I0201 09:09:18.131732 961265 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
I0201 09:09:18.207232 961265 docker.go:233] disabling docker service ...
I0201 09:09:18.207316 961265 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
I0201 09:09:18.226453 961265 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
I0201 09:09:18.236959 961265 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
I0201 09:09:18.315422 961265 ssh_runner.go:195] Run: sudo systemctl mask docker.service
I0201 09:09:18.395960 961265 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
I0201 09:09:18.406512 961265 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %!s(MISSING) "runtime-endpoint: unix:///var/run/crio/crio.sock
" | sudo tee /etc/crictl.yaml"
I0201 09:09:18.420730 961265 crio.go:59] configure cri-o to use "registry.k8s.io/pause:3.9" pause image...
I0201 09:09:18.420792 961265 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*pause_image = .*$|pause_image = "registry.k8s.io/pause:3.9"|' /etc/crio/crio.conf.d/02-crio.conf"
I0201 09:09:18.429621 961265 crio.go:70] configuring cri-o to use "cgroupfs" as cgroup driver...
I0201 09:09:18.429683 961265 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|^.*cgroup_manager = .*$|cgroup_manager = "cgroupfs"|' /etc/crio/crio.conf.d/02-crio.conf"
I0201 09:09:18.438390 961265 ssh_runner.go:195] Run: sh -c "sudo sed -i '/conmon_cgroup = .*/d' /etc/crio/crio.conf.d/02-crio.conf"
I0201 09:09:18.447103 961265 ssh_runner.go:195] Run: sh -c "sudo sed -i '/cgroup_manager = .*/a conmon_cgroup = "pod"' /etc/crio/crio.conf.d/02-crio.conf"
I0201 09:09:18.456037 961265 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
I0201 09:09:18.464499 961265 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
I0201 09:09:18.472274 961265 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
I0201 09:09:18.480144 961265 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I0201 09:09:18.551612 961265 ssh_runner.go:195] Run: sudo systemctl restart crio
I0201 09:09:18.660132 961265 start.go:522] Will wait 60s for socket path /var/run/crio/crio.sock
I0201 09:09:18.660225 961265 ssh_runner.go:195] Run: stat /var/run/crio/crio.sock
I0201 09:09:18.663686 961265 start.go:543] Will wait 60s for crictl version
I0201 09:09:18.663733 961265 ssh_runner.go:195] Run: which crictl
I0201 09:09:18.666982 961265 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
I0201 09:09:18.702079 961265 start.go:559] Version: 0.1.0
RuntimeName: cri-o
RuntimeVersion: 1.24.6
RuntimeApiVersion: v1
I0201 09:09:18.702186 961265 ssh_runner.go:195] Run: crio --version
I0201 09:09:18.740106 961265 ssh_runner.go:195] Run: crio --version
I0201 09:09:18.778328 961265 out.go:177] * Preparing Kubernetes v1.28.4 on CRI-O 1.24.6 ...
I0201 09:09:18.780157 961265 cli_runner.go:164] Run: docker network inspect addons-642352 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I0201 09:09:18.797471 961265 ssh_runner.go:195] Run: grep 192.168.49.1 host.minikube.internal$ /etc/hosts
I0201 09:09:18.801282 961265 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1 host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I0201 09:09:18.811912 961265 preload.go:132] Checking if preload exists for k8s version v1.28.4 and runtime crio
I0201 09:09:18.811969 961265 ssh_runner.go:195] Run: sudo crictl images --output json
I0201 09:09:18.867057 961265 crio.go:496] all images are preloaded for cri-o runtime.
I0201 09:09:18.867084 961265 crio.go:415] Images already preloaded, skipping extraction
I0201 09:09:18.867140 961265 ssh_runner.go:195] Run: sudo crictl images --output json
I0201 09:09:18.899554 961265 crio.go:496] all images are preloaded for cri-o runtime.
I0201 09:09:18.899584 961265 cache_images.go:84] Images are preloaded, skipping loading
I0201 09:09:18.899653 961265 ssh_runner.go:195] Run: crio config
I0201 09:09:18.940574 961265 cni.go:84] Creating CNI manager for ""
I0201 09:09:18.940596 961265 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
I0201 09:09:18.940616 961265 kubeadm.go:87] Using pod CIDR: 10.244.0.0/16
I0201 09:09:18.940638 961265 kubeadm.go:176] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.28.4 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-642352 NodeName:addons-642352 DNSDomain:cluster.local CRISocket:/var/run/crio/crio.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
I0201 09:09:18.940837 961265 kubeadm.go:181] kubeadm config:
apiVersion: kubeadm.k8s.io/v1beta3
kind: InitConfiguration
localAPIEndpoint:
advertiseAddress: 192.168.49.2
bindPort: 8443
bootstrapTokens:
- groups:
- system:bootstrappers:kubeadm:default-node-token
ttl: 24h0m0s
usages:
- signing
- authentication
nodeRegistration:
criSocket: unix:///var/run/crio/crio.sock
name: "addons-642352"
kubeletExtraArgs:
node-ip: 192.168.49.2
taints: []
---
apiVersion: kubeadm.k8s.io/v1beta3
kind: ClusterConfiguration
apiServer:
certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
extraArgs:
enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
controllerManager:
extraArgs:
allocate-node-cidrs: "true"
leader-elect: "false"
scheduler:
extraArgs:
leader-elect: "false"
certificatesDir: /var/lib/minikube/certs
clusterName: mk
controlPlaneEndpoint: control-plane.minikube.internal:8443
etcd:
local:
dataDir: /var/lib/minikube/etcd
extraArgs:
proxy-refresh-interval: "70000"
kubernetesVersion: v1.28.4
networking:
dnsDomain: cluster.local
podSubnet: "10.244.0.0/16"
serviceSubnet: 10.96.0.0/12
---
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
authentication:
x509:
clientCAFile: /var/lib/minikube/certs/ca.crt
cgroupDriver: cgroupfs
hairpinMode: hairpin-veth
runtimeRequestTimeout: 15m
clusterDomain: "cluster.local"
# disable disk resource management by default
imageGCHighThresholdPercent: 100
evictionHard:
nodefs.available: "0%"
nodefs.inodesFree: "0%"
imagefs.available: "0%"
failSwapOn: false
staticPodPath: /etc/kubernetes/manifests
---
apiVersion: kubeproxy.config.k8s.io/v1alpha1
kind: KubeProxyConfiguration
clusterCIDR: "10.244.0.0/16"
metricsBindAddress: 0.0.0.0:10249
conntrack:
maxPerCore: 0
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
tcpEstablishedTimeout: 0s
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
tcpCloseWaitTimeout: 0s
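The generated config is staged as /var/tmp/minikube/kubeadm.yaml.new and later copied to /var/tmp/minikube/kubeadm.yaml; it can be exercised without changing node state via kubeadm's dry-run mode (a sketch, run inside the node, using the bundled binary from this run):

  sudo /var/lib/minikube/binaries/v1.28.4/kubeadm init --config /var/tmp/minikube/kubeadm.yaml --dry-run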
I0201 09:09:18.940932 961265 kubeadm.go:976] kubelet [Unit]
Wants=crio.service
[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.28.4/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --cgroups-per-qos=false --config=/var/lib/kubelet/config.yaml --container-runtime-endpoint=unix:///var/run/crio/crio.sock --enforce-node-allocatable= --hostname-override=addons-642352 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
[Install]
config:
{KubernetesVersion:v1.28.4 ClusterName:addons-642352 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:}
I0201 09:09:18.941006 961265 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.28.4
I0201 09:09:18.949429 961265 binaries.go:44] Found k8s binaries, skipping transfer
I0201 09:09:18.949499 961265 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
I0201 09:09:18.957612 961265 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (423 bytes)
I0201 09:09:18.973693 961265 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
I0201 09:09:18.989912 961265 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2094 bytes)
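The three in-memory scp steps above land the kubelet drop-in, the unit file, and the kubeadm config on the node; to see exactly what the kubelet will start with (a sketch, assuming the same profile):

  minikube -p addons-642352 ssh -- systemctl cat kubelet
  minikube -p addons-642352 ssh -- cat /etc/systemd/system/kubelet.service.d/10-kubeadm.conf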
I0201 09:09:19.005857 961265 ssh_runner.go:195] Run: grep 192.168.49.2 control-plane.minikube.internal$ /etc/hosts
I0201 09:09:19.009056 961265 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2 control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
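Both bash snippets rewrite /etc/hosts idempotently: strip any previous entry for the name, then append the fresh one. For this run the net effect should be (a sketch):

  minikube -p addons-642352 ssh -- grep minikube.internal /etc/hosts
  # expected:
  #   192.168.49.1   host.minikube.internal
  #   192.168.49.2   control-plane.minikube.internal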
I0201 09:09:19.018722 961265 certs.go:56] Setting up /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352 for IP: 192.168.49.2
I0201 09:09:19.018764 961265 certs.go:190] acquiring lock for shared ca certs: {Name:mk23a064dbf71f5683ee734795fa9d1b12119a5a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.018877 961265 certs.go:204] generating minikubeCA CA: /home/jenkins/minikube-integration/18051-952908/.minikube/ca.key
I0201 09:09:19.088342 961265 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18051-952908/.minikube/ca.crt ...
I0201 09:09:19.088377 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/ca.crt: {Name:mk8450580b08f8de8f4caaabc244b0b9a3e07465 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.088546 961265 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18051-952908/.minikube/ca.key ...
I0201 09:09:19.088559 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/ca.key: {Name:mk91cd72630134b0a10147cff7c5d02901665741 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.088629 961265 certs.go:204] generating proxyClientCA CA: /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.key
I0201 09:09:19.357748 961265 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.crt ...
I0201 09:09:19.357783 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.crt: {Name:mk66b0f49e83a54bef9524ee91b65f71d36e089f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.357951 961265 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.key ...
I0201 09:09:19.357962 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.key: {Name:mk6b48ad38fe9fca4cac1d28ae7c569405eab3f1 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.358072 961265 certs.go:319] generating minikube-user signed cert: /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.key
I0201 09:09:19.358086 961265 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.crt with IP's: []
I0201 09:09:19.423244 961265 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.crt ...
I0201 09:09:19.423279 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.crt: {Name:mk4e8d63d0bbef158801ff5e35453830c343311f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.423436 961265 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.key ...
I0201 09:09:19.423447 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/client.key: {Name:mk3ae9cf645fb1aef89a6dde78e942a7398b99bc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.423519 961265 certs.go:319] generating minikube signed cert: /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key.dd3b5fb2
I0201 09:09:19.423537 961265 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt.dd3b5fb2 with IP's: [192.168.49.2 10.96.0.1 127.0.0.1 10.0.0.1]
I0201 09:09:19.701686 961265 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt.dd3b5fb2 ...
I0201 09:09:19.701723 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt.dd3b5fb2: {Name:mka4bc6482a7c74cd5a9648132e0c73303a055e0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.701896 961265 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key.dd3b5fb2 ...
I0201 09:09:19.701911 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key.dd3b5fb2: {Name:mk85c9bfb68d6bab490146263d1bf431892ec4e6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.701986 961265 certs.go:337] copying /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt.dd3b5fb2 -> /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt
I0201 09:09:19.702051 961265 certs.go:341] copying /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key.dd3b5fb2 -> /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key
I0201 09:09:19.702093 961265 certs.go:319] generating aggregator signed cert: /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.key
I0201 09:09:19.702112 961265 crypto.go:68] Generating cert /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.crt with IP's: []
I0201 09:09:19.832638 961265 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.crt ...
I0201 09:09:19.832674 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.crt: {Name:mk61769422e207c5e1f67fa2a72f5b75fd28e6bb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.832849 961265 crypto.go:164] Writing key to /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.key ...
I0201 09:09:19.832863 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.key: {Name:mkfc418ee52ab04e61a516cbff6ec8d9d63c7c61 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:19.833065 961265 certs.go:437] found cert: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca-key.pem (1679 bytes)
I0201 09:09:19.833103 961265 certs.go:437] found cert: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/home/jenkins/minikube-integration/18051-952908/.minikube/certs/ca.pem (1078 bytes)
I0201 09:09:19.833125 961265 certs.go:437] found cert: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/home/jenkins/minikube-integration/18051-952908/.minikube/certs/cert.pem (1123 bytes)
I0201 09:09:19.833146 961265 certs.go:437] found cert: /home/jenkins/minikube-integration/18051-952908/.minikube/certs/home/jenkins/minikube-integration/18051-952908/.minikube/certs/key.pem (1675 bytes)
I0201 09:09:19.833737 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1399 bytes)
I0201 09:09:19.856837 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
I0201 09:09:19.879143 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
I0201 09:09:19.901441 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
I0201 09:09:19.923291 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
I0201 09:09:19.945125 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
I0201 09:09:19.966737 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
I0201 09:09:19.987832 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
I0201 09:09:20.009196 961265 ssh_runner.go:362] scp /home/jenkins/minikube-integration/18051-952908/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
I0201 09:09:20.030994 961265 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
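The apiserver certificate generated above is signed for the node, service, and loopback addresses; its SANs can be confirmed from the copy kept on the host (a sketch; the grep pattern assumes the usual openssl text layout):

  openssl x509 -noout -text \
    -in /home/jenkins/minikube-integration/18051-952908/.minikube/profiles/addons-642352/apiserver.crt \
    | grep -A1 'Subject Alternative Name'
  # should list 192.168.49.2, 10.96.0.1, 127.0.0.1 and 10.0.0.1, matching the generation step above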
I0201 09:09:20.046938 961265 ssh_runner.go:195] Run: openssl version
I0201 09:09:20.052019 961265 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
I0201 09:09:20.060548 961265 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
I0201 09:09:20.063919 961265 certs.go:480] hashing: -rw-r--r-- 1 root root 1111 Feb 1 09:09 /usr/share/ca-certificates/minikubeCA.pem
I0201 09:09:20.063963 961265 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
I0201 09:09:20.070126 961265 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
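The b5213941.0 link name is not arbitrary: OpenSSL looks certificates up by subject hash, which is exactly what the x509 -hash call above prints. A sketch of the relationship, assuming the hash from this run:

  openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem   # prints b5213941 here
  ls -l /etc/ssl/certs/b5213941.0                                           # symlink back to minikubeCA.pem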
I0201 09:09:20.078526 961265 ssh_runner.go:195] Run: ls /var/lib/minikube/certs/etcd
I0201 09:09:20.081686 961265 certs.go:353] certs directory doesn't exist, likely first start: ls /var/lib/minikube/certs/etcd: Process exited with status 2
stdout:
stderr:
ls: cannot access '/var/lib/minikube/certs/etcd': No such file or directory
I0201 09:09:20.081743 961265 kubeadm.go:404] StartCluster: {Name:addons-642352 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.42-1704759386-17866@sha256:8c3c33047f9bc285e1f5f2a5aa14744a2fe04c58478f02f77b06169dea8dd3f0 Memory:4000 CPUs:2 DiskSize:20000 VMDriver: Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:0 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.28.4 ClusterName:addons-642352 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:crio CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.28.4 ContainerRuntime:crio ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs:}
I0201 09:09:20.081822 961265 cri.go:54] listing CRI containers in root : {State:paused Name: Namespaces:[kube-system]}
I0201 09:09:20.081883 961265 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
I0201 09:09:20.115818 961265 cri.go:89] found id: ""
I0201 09:09:20.115879 961265 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
I0201 09:09:20.124121 961265 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
I0201 09:09:20.132398 961265 kubeadm.go:226] ignoring SystemVerification for kubeadm because of docker driver
I0201 09:09:20.132452 961265 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
I0201 09:09:20.140214 961265 kubeadm.go:152] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
stdout:
stderr:
ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
I0201 09:09:20.140262 961265 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.28.4:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
I0201 09:09:20.183277 961265 kubeadm.go:322] [init] Using Kubernetes version: v1.28.4
I0201 09:09:20.183353 961265 kubeadm.go:322] [preflight] Running pre-flight checks
I0201 09:09:20.220765 961265 kubeadm.go:322] [preflight] The system verification failed. Printing the output from the verification:
I0201 09:09:20.220862 961265 kubeadm.go:322] KERNEL_VERSION: 5.15.0-1049-gcp
I0201 09:09:20.220912 961265 kubeadm.go:322] OS: Linux
I0201 09:09:20.220963 961265 kubeadm.go:322] CGROUPS_CPU: enabled
I0201 09:09:20.221029 961265 kubeadm.go:322] CGROUPS_CPUACCT: enabled
I0201 09:09:20.221092 961265 kubeadm.go:322] CGROUPS_CPUSET: enabled
I0201 09:09:20.221146 961265 kubeadm.go:322] CGROUPS_DEVICES: enabled
I0201 09:09:20.221187 961265 kubeadm.go:322] CGROUPS_FREEZER: enabled
I0201 09:09:20.221277 961265 kubeadm.go:322] CGROUPS_MEMORY: enabled
I0201 09:09:20.221356 961265 kubeadm.go:322] CGROUPS_PIDS: enabled
I0201 09:09:20.221442 961265 kubeadm.go:322] CGROUPS_HUGETLB: enabled
I0201 09:09:20.221501 961265 kubeadm.go:322] CGROUPS_BLKIO: enabled
I0201 09:09:20.284833 961265 kubeadm.go:322] [preflight] Pulling images required for setting up a Kubernetes cluster
I0201 09:09:20.284988 961265 kubeadm.go:322] [preflight] This might take a minute or two, depending on the speed of your internet connection
I0201 09:09:20.285129 961265 kubeadm.go:322] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
I0201 09:09:20.485762 961265 kubeadm.go:322] [certs] Using certificateDir folder "/var/lib/minikube/certs"
I0201 09:09:20.488667 961265 out.go:204] - Generating certificates and keys ...
I0201 09:09:20.488789 961265 kubeadm.go:322] [certs] Using existing ca certificate authority
I0201 09:09:20.488887 961265 kubeadm.go:322] [certs] Using existing apiserver certificate and key on disk
I0201 09:09:20.670233 961265 kubeadm.go:322] [certs] Generating "apiserver-kubelet-client" certificate and key
I0201 09:09:20.759031 961265 kubeadm.go:322] [certs] Generating "front-proxy-ca" certificate and key
I0201 09:09:20.916162 961265 kubeadm.go:322] [certs] Generating "front-proxy-client" certificate and key
I0201 09:09:21.000869 961265 kubeadm.go:322] [certs] Generating "etcd/ca" certificate and key
I0201 09:09:21.218037 961265 kubeadm.go:322] [certs] Generating "etcd/server" certificate and key
I0201 09:09:21.218220 961265 kubeadm.go:322] [certs] etcd/server serving cert is signed for DNS names [addons-642352 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
I0201 09:09:21.405168 961265 kubeadm.go:322] [certs] Generating "etcd/peer" certificate and key
I0201 09:09:21.405327 961265 kubeadm.go:322] [certs] etcd/peer serving cert is signed for DNS names [addons-642352 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
I0201 09:09:21.729915 961265 kubeadm.go:322] [certs] Generating "etcd/healthcheck-client" certificate and key
I0201 09:09:22.024394 961265 kubeadm.go:322] [certs] Generating "apiserver-etcd-client" certificate and key
I0201 09:09:22.107168 961265 kubeadm.go:322] [certs] Generating "sa" key and public key
I0201 09:09:22.107286 961265 kubeadm.go:322] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
I0201 09:09:22.165730 961265 kubeadm.go:322] [kubeconfig] Writing "admin.conf" kubeconfig file
I0201 09:09:22.257276 961265 kubeadm.go:322] [kubeconfig] Writing "kubelet.conf" kubeconfig file
I0201 09:09:22.358563 961265 kubeadm.go:322] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
I0201 09:09:22.565604 961265 kubeadm.go:322] [kubeconfig] Writing "scheduler.conf" kubeconfig file
I0201 09:09:22.567150 961265 kubeadm.go:322] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
I0201 09:09:22.569348 961265 kubeadm.go:322] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
I0201 09:09:22.571545 961265 out.go:204] - Booting up control plane ...
I0201 09:09:22.571646 961265 kubeadm.go:322] [control-plane] Creating static Pod manifest for "kube-apiserver"
I0201 09:09:22.571740 961265 kubeadm.go:322] [control-plane] Creating static Pod manifest for "kube-controller-manager"
I0201 09:09:22.571829 961265 kubeadm.go:322] [control-plane] Creating static Pod manifest for "kube-scheduler"
I0201 09:09:22.579873 961265 kubeadm.go:322] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
I0201 09:09:22.580591 961265 kubeadm.go:322] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
I0201 09:09:22.580668 961265 kubeadm.go:322] [kubelet-start] Starting the kubelet
I0201 09:09:22.666877 961265 kubeadm.go:322] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests". This can take up to 4m0s
I0201 09:09:27.669039 961265 kubeadm.go:322] [apiclient] All control plane components are healthy after 5.002277 seconds
I0201 09:09:27.669228 961265 kubeadm.go:322] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
I0201 09:09:27.684587 961265 kubeadm.go:322] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
I0201 09:09:28.204990 961265 kubeadm.go:322] [upload-certs] Skipping phase. Please see --upload-certs
I0201 09:09:28.205191 961265 kubeadm.go:322] [mark-control-plane] Marking the node addons-642352 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
I0201 09:09:28.715329 961265 kubeadm.go:322] [bootstrap-token] Using token: fkfict.yuj3rfx43mdvtl9z
I0201 09:09:28.716948 961265 out.go:204] - Configuring RBAC rules ...
I0201 09:09:28.717077 961265 kubeadm.go:322] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
I0201 09:09:28.721423 961265 kubeadm.go:322] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
I0201 09:09:28.728086 961265 kubeadm.go:322] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
I0201 09:09:28.730962 961265 kubeadm.go:322] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller to automatically approve CSRs from a Node Bootstrap Token
I0201 09:09:28.737240 961265 kubeadm.go:322] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
I0201 09:09:28.742431 961265 kubeadm.go:322] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
I0201 09:09:28.758012 961265 kubeadm.go:322] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
I0201 09:09:28.958867 961265 kubeadm.go:322] [addons] Applied essential addon: CoreDNS
I0201 09:09:29.135285 961265 kubeadm.go:322] [addons] Applied essential addon: kube-proxy
I0201 09:09:29.136898 961265 kubeadm.go:322]
I0201 09:09:29.137084 961265 kubeadm.go:322] Your Kubernetes control-plane has initialized successfully!
I0201 09:09:29.137102 961265 kubeadm.go:322]
I0201 09:09:29.137184 961265 kubeadm.go:322] To start using your cluster, you need to run the following as a regular user:
I0201 09:09:29.137195 961265 kubeadm.go:322]
I0201 09:09:29.137224 961265 kubeadm.go:322] mkdir -p $HOME/.kube
I0201 09:09:29.137285 961265 kubeadm.go:322] sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
I0201 09:09:29.137358 961265 kubeadm.go:322] sudo chown $(id -u):$(id -g) $HOME/.kube/config
I0201 09:09:29.137370 961265 kubeadm.go:322]
I0201 09:09:29.137425 961265 kubeadm.go:322] Alternatively, if you are the root user, you can run:
I0201 09:09:29.137432 961265 kubeadm.go:322]
I0201 09:09:29.137497 961265 kubeadm.go:322] export KUBECONFIG=/etc/kubernetes/admin.conf
I0201 09:09:29.137507 961265 kubeadm.go:322]
I0201 09:09:29.137563 961265 kubeadm.go:322] You should now deploy a pod network to the cluster.
I0201 09:09:29.137657 961265 kubeadm.go:322] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
I0201 09:09:29.137742 961265 kubeadm.go:322] https://kubernetes.io/docs/concepts/cluster-administration/addons/
I0201 09:09:29.137756 961265 kubeadm.go:322]
I0201 09:09:29.137891 961265 kubeadm.go:322] You can now join any number of control-plane nodes by copying certificate authorities
I0201 09:09:29.138002 961265 kubeadm.go:322] and service account keys on each node and then running the following as root:
I0201 09:09:29.138010 961265 kubeadm.go:322]
I0201 09:09:29.138109 961265 kubeadm.go:322] kubeadm join control-plane.minikube.internal:8443 --token fkfict.yuj3rfx43mdvtl9z \
I0201 09:09:29.138235 961265 kubeadm.go:322] --discovery-token-ca-cert-hash sha256:7910553c67bf33c7893af1499c33a494f0bc07d5d4917285901e8697cae63a23 \
I0201 09:09:29.138267 961265 kubeadm.go:322] --control-plane
I0201 09:09:29.138285 961265 kubeadm.go:322]
I0201 09:09:29.138420 961265 kubeadm.go:322] Then you can join any number of worker nodes by running the following on each as root:
I0201 09:09:29.138437 961265 kubeadm.go:322]
I0201 09:09:29.138532 961265 kubeadm.go:322] kubeadm join control-plane.minikube.internal:8443 --token fkfict.yuj3rfx43mdvtl9z \
I0201 09:09:29.138656 961265 kubeadm.go:322] --discovery-token-ca-cert-hash sha256:7910553c67bf33c7893af1499c33a494f0bc07d5d4917285901e8697cae63a23
I0201 09:09:29.141008 961265 kubeadm.go:322] [WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1049-gcp\n", err: exit status 1
I0201 09:09:29.141134 961265 kubeadm.go:322] [WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
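The --discovery-token-ca-cert-hash printed in the join commands above is the SHA-256 of the cluster CA's public key; it can be recomputed from the certs directory used in this run (a sketch, following the standard kubeadm recipe):

  openssl x509 -pubkey -in /var/lib/minikube/certs/ca.crt \
    | openssl rsa -pubin -outform der 2>/dev/null \
    | openssl dgst -sha256 -hex | sed 's/^.* //'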
I0201 09:09:29.141165 961265 cni.go:84] Creating CNI manager for ""
I0201 09:09:29.141175 961265 cni.go:143] "docker" driver + "crio" runtime found, recommending kindnet
I0201 09:09:29.142900 961265 out.go:177] * Configuring CNI (Container Networking Interface) ...
I0201 09:09:29.144446 961265 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
I0201 09:09:29.149570 961265 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.28.4/kubectl ...
I0201 09:09:29.149590 961265 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2438 bytes)
I0201 09:09:29.168496 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
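kindnet is applied here as a plain manifest via the bundled kubectl; a quick health check afterwards (a sketch, assuming the manifest creates the usual kindnet DaemonSet and app=kindnet pod label in kube-system):

  kubectl --context addons-642352 -n kube-system get daemonset kindnet
  kubectl --context addons-642352 -n kube-system get pods -l app=kindnet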
I0201 09:09:29.935985 961265 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
I0201 09:09:29.936073 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:29.936091 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl label nodes minikube.k8s.io/version=v1.32.0 minikube.k8s.io/commit=de6311e496aefb62bd53fcfd0fb6b150999d9424 minikube.k8s.io/name=addons-642352 minikube.k8s.io/updated_at=2024_02_01T09_09_29_0700 minikube.k8s.io/primary=true --all --overwrite --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:29.943226 961265 ops.go:34] apiserver oom_adj: -16
I0201 09:09:30.045308 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:30.545364 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:31.046182 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:31.546085 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:32.045472 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:32.545807 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:33.046045 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:33.546351 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:34.045491 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:34.546330 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:35.045734 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:35.546302 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:36.045679 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:36.545443 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:37.045564 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:37.545339 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:38.045410 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:38.546067 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:39.046051 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:39.545549 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:40.045939 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:40.546198 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:41.045833 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:41.546382 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:42.046182 961265 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.28.4/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
I0201 09:09:42.117313 961265 kubeadm.go:1088] duration metric: took 12.181310623s to wait for elevateKubeSystemPrivileges.
I0201 09:09:42.117352 961265 kubeadm.go:406] StartCluster complete in 22.035616435s
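The polling loop above is simply waiting for the default service account to exist and for the minikube-rbac cluster-admin binding to take effect; the same state can be checked directly once the cluster is up (a sketch):

  kubectl --context addons-642352 -n default get serviceaccount default
  kubectl --context addons-642352 get clusterrolebinding minikube-rbac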
I0201 09:09:42.117372 961265 settings.go:142] acquiring lock: {Name:mk0819893db79284ba714854fba438996c690ff3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:42.117477 961265 settings.go:150] Updating kubeconfig: /home/jenkins/minikube-integration/18051-952908/kubeconfig
I0201 09:09:42.117945 961265 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/18051-952908/kubeconfig: {Name:mk4dec6d7936952ed996b642fbbfa2a496c41523 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0201 09:09:42.118291 961265 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.28.4/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
I0201 09:09:42.118377 961265 addons.go:502] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:true inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volumesnapshots:true yakd:true]
I0201 09:09:42.118521 961265 config.go:182] Loaded profile config "addons-642352": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.28.4
I0201 09:09:42.118535 961265 addons.go:69] Setting default-storageclass=true in profile "addons-642352"
I0201 09:09:42.118549 961265 addons.go:69] Setting yakd=true in profile "addons-642352"
I0201 09:09:42.118561 961265 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-642352"
I0201 09:09:42.118568 961265 addons.go:234] Setting addon yakd=true in "addons-642352"
I0201 09:09:42.118568 961265 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-642352"
I0201 09:09:42.118554 961265 addons.go:69] Setting cloud-spanner=true in profile "addons-642352"
I0201 09:09:42.118589 961265 addons.go:234] Setting addon cloud-spanner=true in "addons-642352"
I0201 09:09:42.118605 961265 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-642352"
I0201 09:09:42.118619 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.118634 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.118651 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.118948 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119091 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119135 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119150 961265 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-642352"
I0201 09:09:42.119165 961265 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-642352"
I0201 09:09:42.119179 961265 addons.go:69] Setting metrics-server=true in profile "addons-642352"
I0201 09:09:42.119206 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.119213 961265 addons.go:234] Setting addon metrics-server=true in "addons-642352"
I0201 09:09:42.119237 961265 addons.go:69] Setting storage-provisioner=true in profile "addons-642352"
I0201 09:09:42.119257 961265 addons.go:234] Setting addon storage-provisioner=true in "addons-642352"
I0201 09:09:42.119257 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.119254 961265 addons.go:69] Setting registry=true in profile "addons-642352"
I0201 09:09:42.119280 961265 addons.go:234] Setting addon registry=true in "addons-642352"
I0201 09:09:42.119293 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.119325 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.119369 961265 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-642352"
I0201 09:09:42.119389 961265 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-642352"
I0201 09:09:42.119468 961265 addons.go:69] Setting volumesnapshots=true in profile "addons-642352"
I0201 09:09:42.119479 961265 addons.go:234] Setting addon volumesnapshots=true in "addons-642352"
I0201 09:09:42.119515 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.119629 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119714 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119733 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119742 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119947 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.120332 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.119135 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.122844 961265 addons.go:69] Setting gcp-auth=true in profile "addons-642352"
I0201 09:09:42.122875 961265 mustload.go:65] Loading cluster: addons-642352
I0201 09:09:42.123093 961265 config.go:182] Loaded profile config "addons-642352": Driver=docker, ContainerRuntime=crio, KubernetesVersion=v1.28.4
I0201 09:09:42.123339 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.124624 961265 addons.go:69] Setting ingress-dns=true in profile "addons-642352"
I0201 09:09:42.124648 961265 addons.go:234] Setting addon ingress-dns=true in "addons-642352"
I0201 09:09:42.124712 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.124791 961265 addons.go:69] Setting helm-tiller=true in profile "addons-642352"
I0201 09:09:42.124814 961265 addons.go:234] Setting addon helm-tiller=true in "addons-642352"
I0201 09:09:42.124860 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.125201 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.125346 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.125663 961265 addons.go:69] Setting ingress=true in profile "addons-642352"
I0201 09:09:42.125684 961265 addons.go:234] Setting addon ingress=true in "addons-642352"
I0201 09:09:42.125738 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.126037 961265 addons.go:69] Setting inspektor-gadget=true in profile "addons-642352"
I0201 09:09:42.126092 961265 addons.go:234] Setting addon inspektor-gadget=true in "addons-642352"
I0201 09:09:42.126154 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.139814 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.142954 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.166152 961265 out.go:177] - Using image gcr.io/k8s-minikube/storage-provisioner:v5
I0201 09:09:42.168251 961265 addons.go:426] installing /etc/kubernetes/addons/storage-provisioner.yaml
I0201 09:09:42.168276 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
I0201 09:09:42.168339 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.169282 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.176989 961265 out.go:177] - Using image nvcr.io/nvidia/k8s-device-plugin:v0.14.3
I0201 09:09:42.179565 961265 addons.go:426] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
I0201 09:09:42.179595 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
I0201 09:09:42.179663 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.183069 961265 out.go:177] - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.0
I0201 09:09:42.184790 961265 addons.go:426] installing /etc/kubernetes/addons/metrics-apiservice.yaml
I0201 09:09:42.184825 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
I0201 09:09:42.184887 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.186065 961265 out.go:177] - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.24.0
I0201 09:09:42.189392 961265 addons.go:426] installing /etc/kubernetes/addons/ig-namespace.yaml
I0201 09:09:42.189417 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
I0201 09:09:42.189499 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.194272 961265 addons.go:234] Setting addon default-storageclass=true in "addons-642352"
I0201 09:09:42.194339 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.194863 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.198374 961265 out.go:177] - Using image docker.io/marcnuri/yakd:0.0.4
I0201 09:09:42.203615 961265 addons.go:426] installing /etc/kubernetes/addons/yakd-ns.yaml
I0201 09:09:42.203645 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
I0201 09:09:42.203710 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.205699 961265 out.go:177] - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
I0201 09:09:42.211702 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
I0201 09:09:42.212669 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
I0201 09:09:42.212746 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.212933 961265 out.go:177] - Using image registry.k8s.io/ingress-nginx/controller:v1.9.5
I0201 09:09:42.211791 961265 out.go:177] - Using image docker.io/registry:2.8.3
I0201 09:09:42.212001 961265 out.go:177] - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.2
I0201 09:09:42.212191 961265 out.go:177] - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.13
I0201 09:09:42.216024 961265 addons.go:426] installing /etc/kubernetes/addons/deployment.yaml
I0201 09:09:42.216046 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
I0201 09:09:42.216114 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.217596 961265 out.go:177] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20231011-8b53cabe0
I0201 09:09:42.217619 961265 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-642352"
I0201 09:09:42.219075 961265 out.go:177] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20231011-8b53cabe0
I0201 09:09:42.219143 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:42.220710 961265 addons.go:426] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
I0201 09:09:42.220734 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
I0201 09:09:42.220797 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.222496 961265 out.go:177] - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.5
I0201 09:09:42.220995 961265 out.go:177] - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
I0201 09:09:42.221295 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:42.223938 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.224068 961265 addons.go:426] installing /etc/kubernetes/addons/registry-rc.yaml
I0201 09:09:42.225844 961265 addons.go:426] installing /etc/kubernetes/addons/ingress-deploy.yaml
I0201 09:09:42.227708 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16103 bytes)
I0201 09:09:42.227746 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (798 bytes)
I0201 09:09:42.229124 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
I0201 09:09:42.227789 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.227805 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.232520 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
I0201 09:09:42.234610 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
I0201 09:09:42.238450 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
I0201 09:09:42.241733 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
I0201 09:09:42.241712 961265 out.go:177] - Using image ghcr.io/helm/tiller:v2.17.0
I0201 09:09:42.244156 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.249977 961265 out.go:177] - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
I0201 09:09:42.247905 961265 addons.go:426] installing /etc/kubernetes/addons/helm-tiller-dp.yaml
I0201 09:09:42.248382 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.252481 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.252559 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.254622 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/helm-tiller-dp.yaml (2422 bytes)
I0201 09:09:42.254710 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.259856 961265 out.go:177] - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
I0201 09:09:42.257680 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.263516 961265 out.go:177] - Using image docker.io/rancher/local-path-provisioner:v0.0.22
I0201 09:09:42.261272 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
I0201 09:09:42.265187 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
I0201 09:09:42.266854 961265 out.go:177] - Using image docker.io/busybox:stable
I0201 09:09:42.265262 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.268702 961265 addons.go:426] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I0201 09:09:42.268721 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
I0201 09:09:42.268778 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.272212 961265 addons.go:426] installing /etc/kubernetes/addons/storageclass.yaml
I0201 09:09:42.272232 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
I0201 09:09:42.272281 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:42.280015 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.286526 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.294819 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.298901 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.300699 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.300880 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.307556 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.311537 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:42.350575 961265 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.28.4/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^ forward . \/etc\/resolv.conf.*/i \ hosts {\n 192.168.49.1 host.minikube.internal\n fallthrough\n }' -e '/^ errors *$/i \ log' | sudo /var/lib/minikube/binaries/v1.28.4/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
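The sed pipeline above splices a hosts block (plus a log directive) into the CoreDNS Corefile so host.minikube.internal resolves to the gateway from inside the cluster; a sketch of how to confirm the result:

  kubectl --context addons-642352 -n kube-system get configmap coredns -o jsonpath='{.data.Corefile}' | grep -A3 'hosts {'
  # expected fragment, per the sed expressions above:
  #   hosts {
  #      192.168.49.1 host.minikube.internal
  #      fallthrough
  #   }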
I0201 09:09:42.635019 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
I0201 09:09:42.643883 961265 addons.go:426] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
I0201 09:09:42.643924 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
I0201 09:09:42.651441 961265 kapi.go:248] "coredns" deployment in "kube-system" namespace and "addons-642352" context rescaled to 1 replicas
I0201 09:09:42.651496 961265 start.go:223] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.28.4 ContainerRuntime:crio ControlPlane:true Worker:true}
I0201 09:09:42.653572 961265 out.go:177] * Verifying Kubernetes components...
I0201 09:09:42.653179 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
I0201 09:09:42.655201 961265 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
I0201 09:09:42.743384 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
I0201 09:09:42.833726 961265 addons.go:426] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
I0201 09:09:42.833823 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
I0201 09:09:42.834326 961265 addons.go:426] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
I0201 09:09:42.834382 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
I0201 09:09:42.848326 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
I0201 09:09:42.943761 961265 addons.go:426] installing /etc/kubernetes/addons/registry-svc.yaml
I0201 09:09:42.943863 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
I0201 09:09:42.944267 961265 addons.go:426] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
I0201 09:09:42.944331 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
I0201 09:09:42.945660 961265 addons.go:426] installing /etc/kubernetes/addons/yakd-sa.yaml
I0201 09:09:42.945717 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
I0201 09:09:42.951360 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
I0201 09:09:42.957162 961265 addons.go:426] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
I0201 09:09:42.957190 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
I0201 09:09:43.043661 961265 addons.go:426] installing /etc/kubernetes/addons/helm-tiller-rbac.yaml
I0201 09:09:43.043690 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/helm-tiller-rbac.yaml (1188 bytes)
I0201 09:09:43.045923 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
I0201 09:09:43.049644 961265 addons.go:426] installing /etc/kubernetes/addons/ig-role.yaml
I0201 09:09:43.049734 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
I0201 09:09:43.132956 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-hostpath.yaml
I0201 09:09:43.133053 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
I0201 09:09:43.138535 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
I0201 09:09:43.147830 961265 addons.go:426] installing /etc/kubernetes/addons/metrics-server-service.yaml
I0201 09:09:43.147928 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
I0201 09:09:43.232406 961265 addons.go:426] installing /etc/kubernetes/addons/ig-rolebinding.yaml
I0201 09:09:43.232492 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
I0201 09:09:43.235099 961265 addons.go:426] installing /etc/kubernetes/addons/registry-proxy.yaml
I0201 09:09:43.235176 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
I0201 09:09:43.239429 961265 addons.go:426] installing /etc/kubernetes/addons/yakd-crb.yaml
I0201 09:09:43.239462 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
I0201 09:09:43.332119 961265 addons.go:426] installing /etc/kubernetes/addons/helm-tiller-svc.yaml
I0201 09:09:43.332240 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/helm-tiller-svc.yaml (951 bytes)
I0201 09:09:43.441924 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
I0201 09:09:43.455008 961265 addons.go:426] installing /etc/kubernetes/addons/yakd-svc.yaml
I0201 09:09:43.455068 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
I0201 09:09:43.532587 961265 addons.go:426] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
I0201 09:09:43.532682 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
I0201 09:09:43.546438 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
I0201 09:09:43.546521 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
I0201 09:09:43.547895 961265 addons.go:426] installing /etc/kubernetes/addons/ig-clusterrole.yaml
I0201 09:09:43.547964 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
I0201 09:09:43.634685 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/helm-tiller-dp.yaml -f /etc/kubernetes/addons/helm-tiller-rbac.yaml -f /etc/kubernetes/addons/helm-tiller-svc.yaml
I0201 09:09:43.641986 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
I0201 09:09:43.832394 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
I0201 09:09:43.832505 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
I0201 09:09:43.839319 961265 addons.go:426] installing /etc/kubernetes/addons/yakd-dp.yaml
I0201 09:09:43.839417 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
I0201 09:09:43.850723 961265 addons.go:426] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
I0201 09:09:43.850837 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
I0201 09:09:44.031042 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
I0201 09:09:44.031136 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
I0201 09:09:44.234986 961265 addons.go:426] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I0201 09:09:44.235087 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
I0201 09:09:44.241023 961265 addons.go:426] installing /etc/kubernetes/addons/ig-crd.yaml
I0201 09:09:44.241165 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
I0201 09:09:44.332067 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
I0201 09:09:44.636750 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I0201 09:09:44.742001 961265 addons.go:426] installing /etc/kubernetes/addons/ig-daemonset.yaml
I0201 09:09:44.742127 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
I0201 09:09:44.844568 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
I0201 09:09:44.844655 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
I0201 09:09:45.040114 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
I0201 09:09:45.341209 961265 addons.go:426] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
I0201 09:09:45.341302 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
I0201 09:09:45.443247 961265 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.28.4/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^ forward . \/etc\/resolv.conf.*/i \ hosts {\n 192.168.49.1 host.minikube.internal\n fallthrough\n }' -e '/^ errors *$/i \ log' | sudo /var/lib/minikube/binaries/v1.28.4/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (3.092615936s)
I0201 09:09:45.443287 961265 start.go:929] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
I0201 09:09:45.732824 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
I0201 09:09:45.732936 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
I0201 09:09:45.849683 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
I0201 09:09:45.849719 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
I0201 09:09:46.232782 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
I0201 09:09:46.232842 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
I0201 09:09:46.837648 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
I0201 09:09:46.837742 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
I0201 09:09:47.131082 961265 addons.go:426] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I0201 09:09:47.131173 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
I0201 09:09:47.451779 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
I0201 09:09:47.655627 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.020553961s)
I0201 09:09:47.655716 961265 ssh_runner.go:235] Completed: sudo systemctl is-active --quiet service kubelet: (5.000491953s)
I0201 09:09:47.656798 961265 node_ready.go:35] waiting up to 6m0s for node "addons-642352" to be "Ready" ...
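For illustration only: the node_ready.go step above waits until the node object reports a Ready condition of True (the status checks at node_ready.go:58 and node_ready.go:49 further down show it flipping from "False" to "True" roughly 3.5s later). A minimal client-go sketch of that kind of wait follows; the poll interval and kubeconfig path are assumptions for the sketch, and this is not minikube's actual implementation.
-- sketch (Go) --
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitNodeReady polls the named node until its NodeReady condition is True
// or the timeout expires.
func waitNodeReady(client kubernetes.Interface, name string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		node, err := client.CoreV1().Nodes().Get(context.TODO(), name, metav1.GetOptions{})
		if err == nil {
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady && c.Status == corev1.ConditionTrue {
					return nil
				}
			}
		}
		time.Sleep(2 * time.Second) // poll interval is an assumption, not minikube's value
	}
	return fmt.Errorf("node %q not Ready within %s", name, timeout)
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig") // placeholder path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	if err := waitNodeReady(client, "addons-642352", 6*time.Minute); err != nil {
		panic(err)
	}
	fmt.Println("node is Ready")
}
-- /sketch --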
I0201 09:09:47.657056 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (5.001868939s)
I0201 09:09:49.033999 961265 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
I0201 09:09:49.034140 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:49.047509 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (6.304073281s)
I0201 09:09:49.047553 961265 addons.go:470] Verifying addon ingress=true in "addons-642352"
I0201 09:09:49.049224 961265 out.go:177] * Verifying ingress addon...
I0201 09:09:49.047628 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (6.199263097s)
I0201 09:09:49.047750 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (6.096343787s)
I0201 09:09:49.047822 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (6.001853722s)
I0201 09:09:49.047901 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (5.90933897s)
I0201 09:09:49.047991 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (5.605963625s)
I0201 09:09:49.049610 961265 addons.go:470] Verifying addon metrics-server=true in "addons-642352"
I0201 09:09:49.048050 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/helm-tiller-dp.yaml -f /etc/kubernetes/addons/helm-tiller-rbac.yaml -f /etc/kubernetes/addons/helm-tiller-svc.yaml: (5.41324886s)
I0201 09:09:49.048078 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (5.406051954s)
I0201 09:09:49.049705 961265 addons.go:470] Verifying addon registry=true in "addons-642352"
I0201 09:09:49.048105 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (4.71594539s)
I0201 09:09:49.048235 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (4.411438402s)
I0201 09:09:49.048281 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (4.008077533s)
I0201 09:09:49.051450 961265 out.go:177] * Verifying registry addon...
W0201 09:09:49.051674 961265 addons.go:452] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
I0201 09:09:49.057921 961265 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
I0201 09:09:49.063015 961265 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
I0201 09:09:49.063289 961265 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
minikube -p addons-642352 service yakd-dashboard -n yakd-dashboard
I0201 09:09:49.065708 961265 retry.go:31] will retry after 286.419605ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
stdout:
customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
serviceaccount/snapshot-controller created
clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
deployment.apps/snapshot-controller created
stderr:
error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
ensure CRDs are installed first
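The two failures above are the same CRD-ordering problem: the VolumeSnapshotClass object is applied in the same batch as the snapshot.storage.k8s.io CRDs, so the first apply can race the CRDs being established, and minikube simply retries (here after ~286ms, then via the `kubectl apply --force` run a few lines below, which completes successfully). A minimal Go sketch of that retry-with-backoff pattern, assuming a kubectl binary on PATH and using placeholder paths and attempt counts; this is not minikube's actual code.
-- sketch (Go) --
package main

import (
	"fmt"
	"os/exec"
	"time"
)

// applyWithRetry re-runs `kubectl apply -f <manifest>` until it succeeds or the
// attempt budget is spent, sleeping between tries so CRDs applied earlier in the
// same batch have time to be established by the API server.
func applyWithRetry(kubeconfig, manifest string, attempts int, backoff time.Duration) error {
	var lastErr error
	for i := 0; i < attempts; i++ {
		out, err := exec.Command("kubectl", "--kubeconfig", kubeconfig, "apply", "-f", manifest).CombinedOutput()
		if err == nil {
			return nil
		}
		lastErr = fmt.Errorf("apply %s failed: %v\n%s", manifest, err, out)
		time.Sleep(backoff)
		backoff *= 2 // simple exponential backoff; the interval is an assumption
	}
	return lastErr
}

func main() {
	// Hypothetical invocation for illustration; any manifest that depends on a CRD works.
	err := applyWithRetry("/var/lib/minikube/kubeconfig",
		"/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml",
		5, 300*time.Millisecond)
	if err != nil {
		fmt.Println("giving up:", err)
	}
}
-- /sketch --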
I0201 09:09:49.067860 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
W0201 09:09:49.143069 961265 out.go:239] ! Enabling 'default-storageclass' returned an error: running callbacks: [Error making standard the default storage class: Error while marking storage class local-path as non-default: Operation cannot be fulfilled on storageclasses.storage.k8s.io "local-path": the object has been modified; please apply your changes to the latest version and try again]
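The default-storageclass warning above is an optimistic-concurrency conflict: something else updated the local-path StorageClass between the read and the write, so the update was rejected with "the object has been modified". The usual client-go remedy is to re-read the object and retry the update under retry.RetryOnConflict; a hedged sketch follows (the kubeconfig path and class name mirror the log but are used only as placeholders, and this is not minikube's code).
-- sketch (Go) --
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/util/retry"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig") // placeholder path
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Mark "local-path" as non-default, re-reading and retrying if another
	// controller modified the object between our Get and Update (the conflict
	// reported in the warning above).
	err = retry.RetryOnConflict(retry.DefaultRetry, func() error {
		sc, err := client.StorageV1().StorageClasses().Get(context.TODO(), "local-path", metav1.GetOptions{})
		if err != nil {
			return err
		}
		if sc.Annotations == nil {
			sc.Annotations = map[string]string{}
		}
		sc.Annotations["storageclass.kubernetes.io/is-default-class"] = "false"
		_, err = client.StorageV1().StorageClasses().Update(context.TODO(), sc, metav1.UpdateOptions{})
		return err
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("storage class updated")
}
-- /sketch --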
I0201 09:09:49.145347 961265 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=registry
I0201 09:09:49.145381 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:49.145704 961265 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
I0201 09:09:49.145734 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:49.349774 961265 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
I0201 09:09:49.353127 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
I0201 09:09:49.370209 961265 addons.go:234] Setting addon gcp-auth=true in "addons-642352"
I0201 09:09:49.370279 961265 host.go:66] Checking if "addons-642352" exists ...
I0201 09:09:49.370849 961265 cli_runner.go:164] Run: docker container inspect addons-642352 --format={{.State.Status}}
I0201 09:09:49.394003 961265 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
I0201 09:09:49.394056 961265 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-642352
I0201 09:09:49.410797 961265 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:34031 SSHKeyPath:/home/jenkins/minikube-integration/18051-952908/.minikube/machines/addons-642352/id_rsa Username:docker}
I0201 09:09:49.567027 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:49.570238 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:49.660040 961265 node_ready.go:58] node "addons-642352" has status "Ready":"False"
I0201 09:09:50.068041 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:50.069856 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:50.567087 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:50.569595 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:51.139646 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:51.140558 961265 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
I0201 09:09:51.140587 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:51.160531 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (3.708630502s)
I0201 09:09:51.160576 961265 addons.go:470] Verifying addon csi-hostpath-driver=true in "addons-642352"
I0201 09:09:51.160595 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (1.807387605s)
I0201 09:09:51.162513 961265 out.go:177] * Verifying csi-hostpath-driver addon...
I0201 09:09:51.162557 961265 node_ready.go:49] node "addons-642352" has status "Ready":"True"
I0201 09:09:51.163855 961265 node_ready.go:38] duration metric: took 3.507021919s waiting for node "addons-642352" to be "Ready" ...
I0201 09:09:51.163876 961265 pod_ready.go:35] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
I0201 09:09:51.160648 961265 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (1.766619506s)
I0201 09:09:51.165703 961265 out.go:177] - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20231011-8b53cabe0
I0201 09:09:51.164697 961265 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
I0201 09:09:51.169552 961265 out.go:177] - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.0
I0201 09:09:51.171533 961265 addons.go:426] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
I0201 09:09:51.171555 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
I0201 09:09:51.174136 961265 pod_ready.go:78] waiting up to 6m0s for pod "coredns-5dd5756b68-97z46" in "kube-system" namespace to be "Ready" ...
I0201 09:09:51.235285 961265 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I0201 09:09:51.235313 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:51.251913 961265 addons.go:426] installing /etc/kubernetes/addons/gcp-auth-service.yaml
I0201 09:09:51.251942 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
I0201 09:09:51.273974 961265 addons.go:426] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
I0201 09:09:51.273998 961265 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5432 bytes)
I0201 09:09:51.349895 961265 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
I0201 09:09:51.568335 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:51.570491 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:51.673110 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:52.136974 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:52.138439 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:52.235628 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:52.642220 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:52.650815 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:52.737286 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:53.132815 961265 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.28.4/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.782815492s)
I0201 09:09:53.134848 961265 addons.go:470] Verifying addon gcp-auth=true in "addons-642352"
I0201 09:09:53.136655 961265 out.go:177] * Verifying gcp-auth addon...
I0201 09:09:53.139478 961265 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
I0201 09:09:53.145247 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:53.146811 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:53.154163 961265 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
I0201 09:09:53.154244 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:53.235830 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:53.241650 961265 pod_ready.go:102] pod "coredns-5dd5756b68-97z46" in "kube-system" namespace has status "Ready":"False"
I0201 09:09:53.570304 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:53.634083 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:53.644543 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:53.737445 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:54.137922 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:54.138095 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:54.144132 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:54.236445 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:54.635992 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:54.637996 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:54.644417 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:54.737533 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:54.739369 961265 pod_ready.go:92] pod "coredns-5dd5756b68-97z46" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:54.739410 961265 pod_ready.go:81] duration metric: took 3.565246511s waiting for pod "coredns-5dd5756b68-97z46" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.739440 961265 pod_ready.go:78] waiting up to 6m0s for pod "etcd-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.747939 961265 pod_ready.go:92] pod "etcd-addons-642352" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:54.747968 961265 pod_ready.go:81] duration metric: took 8.516298ms waiting for pod "etcd-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.747984 961265 pod_ready.go:78] waiting up to 6m0s for pod "kube-apiserver-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.756753 961265 pod_ready.go:92] pod "kube-apiserver-addons-642352" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:54.756792 961265 pod_ready.go:81] duration metric: took 8.799033ms waiting for pod "kube-apiserver-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.756809 961265 pod_ready.go:78] waiting up to 6m0s for pod "kube-controller-manager-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.832837 961265 pod_ready.go:92] pod "kube-controller-manager-addons-642352" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:54.832865 961265 pod_ready.go:81] duration metric: took 76.047443ms waiting for pod "kube-controller-manager-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.832882 961265 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-gzzdh" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.838833 961265 pod_ready.go:92] pod "kube-proxy-gzzdh" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:54.838860 961265 pod_ready.go:81] duration metric: took 5.96935ms waiting for pod "kube-proxy-gzzdh" in "kube-system" namespace to be "Ready" ...
I0201 09:09:54.838873 961265 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:55.133285 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:55.134228 961265 pod_ready.go:92] pod "kube-scheduler-addons-642352" in "kube-system" namespace has status "Ready":"True"
I0201 09:09:55.134254 961265 pod_ready.go:81] duration metric: took 295.372482ms waiting for pod "kube-scheduler-addons-642352" in "kube-system" namespace to be "Ready" ...
I0201 09:09:55.134267 961265 pod_ready.go:78] waiting up to 6m0s for pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace to be "Ready" ...
I0201 09:09:55.134676 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:55.143688 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:55.174089 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:55.568397 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:55.570631 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:55.643862 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:55.673305 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:56.068236 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:56.070107 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:56.143420 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:56.173812 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:56.568653 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:56.571205 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:56.643202 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:56.672804 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:57.067468 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:57.069821 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:57.141121 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:09:57.143146 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:57.173336 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:57.572873 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:57.573578 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:57.643286 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:57.672895 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:58.068131 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:58.070015 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:58.143045 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:58.173151 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:58.638226 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:58.640332 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:58.648296 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:58.742675 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:59.068687 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:59.133605 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:59.143218 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:59.235036 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:09:59.569380 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:09:59.570838 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:09:59.641031 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:09:59.643162 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:09:59.673360 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:00.068075 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:00.070504 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:00.142808 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:00.174432 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:00.568157 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:00.570918 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:00.643648 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:00.673539 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:01.068276 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:01.070506 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:01.146168 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:01.173527 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:01.568632 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:01.570309 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:01.641585 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:01.643383 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:01.673304 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:02.067873 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:02.070523 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:02.143081 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:02.172682 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:02.568220 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:02.570229 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:02.642546 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:02.674547 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:03.068480 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:03.069741 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:03.142389 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:03.172755 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:03.567765 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:03.569869 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:03.642982 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:03.673805 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:04.069360 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:04.070270 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:04.140234 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:04.142902 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:04.235144 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:04.568454 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:04.570377 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:04.643614 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:04.675683 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:05.067673 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:05.070115 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:05.143315 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:05.173386 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:05.567739 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:05.570444 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:05.642468 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:05.673482 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:06.068077 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:06.069731 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:06.140343 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:06.142486 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:06.172655 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:06.567298 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:06.569489 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:06.642235 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:06.673191 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:07.067816 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:07.069663 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:07.142169 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:07.172740 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:07.567520 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:07.571601 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:07.642269 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:07.675396 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:08.067830 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:08.070607 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:08.141246 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:08.142965 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:08.172603 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:08.567669 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:08.569829 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:08.642711 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:08.673342 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:09.069334 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:09.070567 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:09.142864 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:09.173718 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:09.568949 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:09.570246 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:09.643000 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:09.672867 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:10.067754 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:10.069957 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:10.142308 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:10.172553 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:10.567075 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:10.569736 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:10.641187 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:10.642999 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:10.672539 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:11.068519 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:11.070285 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:11.142608 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:11.173151 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:11.567852 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:11.570384 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:11.642756 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:11.674880 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:12.069228 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:12.069917 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:12.143328 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:12.173524 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:12.568644 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:12.570653 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:12.643196 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:12.673888 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:13.068115 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:13.071743 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:13.141545 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:13.143332 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:13.173547 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:13.568875 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:13.571131 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:13.643051 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:13.673750 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:14.068757 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:14.070032 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:14.143024 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:14.173223 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:14.568138 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:14.570834 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:14.643071 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:14.673047 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:15.067859 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:15.069905 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:15.142522 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:15.173180 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:15.568766 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:15.570080 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:15.640172 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:15.642682 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:15.673757 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:16.068378 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:16.069459 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:16.142627 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:16.174018 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:16.568023 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:16.570371 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:16.642526 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:16.672942 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:17.068009 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:17.070183 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:17.142162 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:17.172938 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:17.568041 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:17.570752 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:17.640484 961265 pod_ready.go:102] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:17.642601 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:17.675078 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:18.067474 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:18.069972 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:18.139939 961265 pod_ready.go:92] pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace has status "Ready":"True"
I0201 09:10:18.139964 961265 pod_ready.go:81] duration metric: took 23.005689248s waiting for pod "metrics-server-69cf46c98-4mrxs" in "kube-system" namespace to be "Ready" ...
I0201 09:10:18.139974 961265 pod_ready.go:78] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace to be "Ready" ...
I0201 09:10:18.142238 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:18.173044 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:18.567635 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:18.570076 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:18.643861 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:18.672708 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:19.067517 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:19.069861 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:19.144131 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:19.174069 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:19.569321 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:19.569723 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:19.643963 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:19.673076 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:20.068975 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:20.069592 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:20.143489 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:20.145699 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:20.173269 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:20.568671 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
I0201 09:10:20.570056 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:20.643843 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:20.673524 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:21.068405 961265 kapi.go:107] duration metric: took 32.005393744s to wait for kubernetes.io/minikube-addons=registry ...
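The kapi.go:96 lines above (and kapi.go:107 when a selector completes, as for kubernetes.io/minikube-addons=registry here) come from a label-selector poll that loops until every matching pod reports Ready, then records the elapsed time. Below is a minimal sketch of that pattern using client-go; the package, function names, and polling interval are illustrative assumptions, not the actual minikube implementation.

package podwait

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// waitForPodsBySelector polls every interval until at least one pod matches
// the selector and every matching pod has the PodReady condition set to True,
// or until the caller's context (e.g. a 6m0s timeout) expires.
func waitForPodsBySelector(ctx context.Context, cs kubernetes.Interface, ns, selector string, interval time.Duration) error {
	start := time.Now()
	for {
		pods, err := cs.CoreV1().Pods(ns).List(ctx, metav1.ListOptions{LabelSelector: selector})
		if err == nil && len(pods.Items) > 0 && allReady(pods.Items) {
			fmt.Printf("took %s to wait for %s\n", time.Since(start), selector)
			return nil
		}
		select {
		case <-ctx.Done():
			return ctx.Err() // overall timeout reached while pods were still Pending
		case <-time.After(interval):
		}
	}
}

// allReady reports whether every pod in the list has condition PodReady=True.
func allReady(pods []corev1.Pod) bool {
	for _, p := range pods {
		ready := false
		for _, c := range p.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
				ready = true
			}
		}
		if !ready {
			return false
		}
	}
	return true
}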
I0201 09:10:21.070771 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:21.143427 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:21.173493 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:21.569373 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:21.643744 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:21.673343 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:22.070296 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:22.143053 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:22.172873 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:22.570317 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:22.643360 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:22.646080 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:22.673488 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:23.069847 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:23.143397 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:23.173586 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:23.569841 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:23.643635 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:23.673606 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:24.069748 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:24.143344 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:24.173335 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:24.569682 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:24.643052 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:24.673009 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:25.070329 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:25.143355 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:25.145689 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:25.173824 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:25.570171 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:25.644116 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:25.676957 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:26.070855 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:26.143011 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:26.173156 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:26.570320 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:26.643108 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:26.673043 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:27.071039 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:27.144234 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:27.146919 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:27.174140 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:27.636266 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:27.644439 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:27.741343 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:28.070923 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:28.144012 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:28.234195 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:28.571010 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:28.645270 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:28.674666 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:29.071312 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:29.144799 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:29.147336 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:29.174280 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:29.571038 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:29.643453 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:29.674743 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:30.069940 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:30.144217 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:30.175056 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:30.570632 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:30.643251 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:30.674014 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:31.070151 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:31.143963 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:31.173517 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:31.570206 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:31.643795 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:31.646540 961265 pod_ready.go:102] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"False"
I0201 09:10:31.673358 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:32.069655 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:32.143322 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:32.173946 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:32.570569 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:32.643808 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:32.673282 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:33.070593 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:33.143175 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:33.145557 961265 pod_ready.go:92] pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace has status "Ready":"True"
I0201 09:10:33.145581 961265 pod_ready.go:81] duration metric: took 15.005601068s waiting for pod "nvidia-device-plugin-daemonset-p8cwv" in "kube-system" namespace to be "Ready" ...
I0201 09:10:33.145608 961265 pod_ready.go:38] duration metric: took 41.981709701s of extra waiting for all system-critical pods and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
I0201 09:10:33.145628 961265 api_server.go:52] waiting for apiserver process to appear ...
I0201 09:10:33.145688 961265 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I0201 09:10:33.160175 961265 api_server.go:72] duration metric: took 50.50863737s to wait for apiserver process to appear ...
I0201 09:10:33.160201 961265 api_server.go:88] waiting for apiserver healthz status ...
I0201 09:10:33.160222 961265 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
I0201 09:10:33.164769 961265 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
ok
I0201 09:10:33.165838 961265 api_server.go:141] control plane version: v1.28.4
I0201 09:10:33.165865 961265 api_server.go:131] duration metric: took 5.656371ms to wait for apiserver health ...
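The healthz check logged at api_server.go:253/279 above is an HTTPS GET against the apiserver's /healthz endpoint, treated as healthy once it returns 200 with body "ok". A hedged sketch of such a probe follows; the function name and the InsecureSkipVerify transport are illustrative assumptions (a real client would trust the cluster CA and present client certificates).

package apicheck

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

// checkHealthz performs one HTTPS GET against endpoint+"/healthz" and reports
// whether the apiserver answered 200 "ok". Skipping TLS verification is only
// for this sketch; do not do this against a production cluster.
func checkHealthz(endpoint string) error {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(endpoint + "/healthz")
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("healthz returned %d: %s", resp.StatusCode, body)
	}
	fmt.Printf("%s/healthz returned 200:\n%s\n", endpoint, body)
	return nil
}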
I0201 09:10:33.165873 961265 system_pods.go:43] waiting for kube-system pods to appear ...
I0201 09:10:33.172396 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:33.174141 961265 system_pods.go:59] 19 kube-system pods found
I0201 09:10:33.174166 961265 system_pods.go:61] "coredns-5dd5756b68-97z46" [41e764bb-a62c-4fd7-8f18-f194edf1d2d2] Running
I0201 09:10:33.174173 961265 system_pods.go:61] "csi-hostpath-attacher-0" [96c268bf-fa4c-448b-906b-909f387b0532] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I0201 09:10:33.174181 961265 system_pods.go:61] "csi-hostpath-resizer-0" [3f32b7c0-196f-405c-b9f2-8ce0ae761c2a] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I0201 09:10:33.174193 961265 system_pods.go:61] "csi-hostpathplugin-7h7xf" [24a559af-98e5-478e-9793-ad862550295b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I0201 09:10:33.174198 961265 system_pods.go:61] "etcd-addons-642352" [eebf2d48-e591-41e2-90e9-96c464002302] Running
I0201 09:10:33.174202 961265 system_pods.go:61] "kindnet-tmjnr" [08025e21-40c7-4ac1-af80-f9c1a9e8b0f9] Running
I0201 09:10:33.174209 961265 system_pods.go:61] "kube-apiserver-addons-642352" [9a0cb166-c0af-4db0-b54d-fd6a8b0f676b] Running
I0201 09:10:33.174213 961265 system_pods.go:61] "kube-controller-manager-addons-642352" [3d53e85e-ecd6-43a6-ae7b-deb6b6c88479] Running
I0201 09:10:33.174221 961265 system_pods.go:61] "kube-ingress-dns-minikube" [46dc9e1a-2137-442c-993a-921d6322672a] Running
I0201 09:10:33.174225 961265 system_pods.go:61] "kube-proxy-gzzdh" [53b46773-35c5-412f-984e-a49b361b13e1] Running
I0201 09:10:33.174231 961265 system_pods.go:61] "kube-scheduler-addons-642352" [2e89d318-279a-42f3-92c1-2c694dd8ca02] Running
I0201 09:10:33.174235 961265 system_pods.go:61] "metrics-server-69cf46c98-4mrxs" [d1e198e3-e716-4091-b76d-458a065b8206] Running
I0201 09:10:33.174241 961265 system_pods.go:61] "nvidia-device-plugin-daemonset-p8cwv" [e416cbdf-6552-406b-8891-00782080893a] Running
I0201 09:10:33.174245 961265 system_pods.go:61] "registry-proxy-tsccz" [5512787a-dabe-4019-aead-c68f8a431ce8] Running
I0201 09:10:33.174251 961265 system_pods.go:61] "registry-s2xzz" [4dea1280-3766-46b5-b712-24e29ff33b38] Running
I0201 09:10:33.174256 961265 system_pods.go:61] "snapshot-controller-58dbcc7b99-2qmjj" [0928963d-226a-4e66-b7ae-752f13b44c3b] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I0201 09:10:33.174263 961265 system_pods.go:61] "snapshot-controller-58dbcc7b99-t8bsp" [888cb879-39ef-4b8e-ae14-dca7f4cab0bb] Running
I0201 09:10:33.174267 961265 system_pods.go:61] "storage-provisioner" [0153e690-ed67-497c-ad47-0666208720b1] Running
I0201 09:10:33.174272 961265 system_pods.go:61] "tiller-deploy-7b677967b9-q2zb9" [3d55d479-092d-4cdb-9344-110276a11056] Running
I0201 09:10:33.174278 961265 system_pods.go:74] duration metric: took 8.398838ms to wait for pod list to return data ...
I0201 09:10:33.174287 961265 default_sa.go:34] waiting for default service account to be created ...
I0201 09:10:33.176213 961265 default_sa.go:45] found service account: "default"
I0201 09:10:33.176231 961265 default_sa.go:55] duration metric: took 1.936428ms for default service account to be created ...
I0201 09:10:33.176238 961265 system_pods.go:116] waiting for k8s-apps to be running ...
I0201 09:10:33.183773 961265 system_pods.go:86] 19 kube-system pods found
I0201 09:10:33.183807 961265 system_pods.go:89] "coredns-5dd5756b68-97z46" [41e764bb-a62c-4fd7-8f18-f194edf1d2d2] Running
I0201 09:10:33.183825 961265 system_pods.go:89] "csi-hostpath-attacher-0" [96c268bf-fa4c-448b-906b-909f387b0532] Pending / Ready:ContainersNotReady (containers with unready status: [csi-attacher]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-attacher])
I0201 09:10:33.183834 961265 system_pods.go:89] "csi-hostpath-resizer-0" [3f32b7c0-196f-405c-b9f2-8ce0ae761c2a] Pending / Ready:ContainersNotReady (containers with unready status: [csi-resizer]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-resizer])
I0201 09:10:33.183846 961265 system_pods.go:89] "csi-hostpathplugin-7h7xf" [24a559af-98e5-478e-9793-ad862550295b] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
I0201 09:10:33.183858 961265 system_pods.go:89] "etcd-addons-642352" [eebf2d48-e591-41e2-90e9-96c464002302] Running
I0201 09:10:33.183867 961265 system_pods.go:89] "kindnet-tmjnr" [08025e21-40c7-4ac1-af80-f9c1a9e8b0f9] Running
I0201 09:10:33.183878 961265 system_pods.go:89] "kube-apiserver-addons-642352" [9a0cb166-c0af-4db0-b54d-fd6a8b0f676b] Running
I0201 09:10:33.183887 961265 system_pods.go:89] "kube-controller-manager-addons-642352" [3d53e85e-ecd6-43a6-ae7b-deb6b6c88479] Running
I0201 09:10:33.183898 961265 system_pods.go:89] "kube-ingress-dns-minikube" [46dc9e1a-2137-442c-993a-921d6322672a] Running
I0201 09:10:33.183908 961265 system_pods.go:89] "kube-proxy-gzzdh" [53b46773-35c5-412f-984e-a49b361b13e1] Running
I0201 09:10:33.183916 961265 system_pods.go:89] "kube-scheduler-addons-642352" [2e89d318-279a-42f3-92c1-2c694dd8ca02] Running
I0201 09:10:33.183926 961265 system_pods.go:89] "metrics-server-69cf46c98-4mrxs" [d1e198e3-e716-4091-b76d-458a065b8206] Running
I0201 09:10:33.183937 961265 system_pods.go:89] "nvidia-device-plugin-daemonset-p8cwv" [e416cbdf-6552-406b-8891-00782080893a] Running
I0201 09:10:33.183946 961265 system_pods.go:89] "registry-proxy-tsccz" [5512787a-dabe-4019-aead-c68f8a431ce8] Running
I0201 09:10:33.183954 961265 system_pods.go:89] "registry-s2xzz" [4dea1280-3766-46b5-b712-24e29ff33b38] Running
I0201 09:10:33.183967 961265 system_pods.go:89] "snapshot-controller-58dbcc7b99-2qmjj" [0928963d-226a-4e66-b7ae-752f13b44c3b] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
I0201 09:10:33.183977 961265 system_pods.go:89] "snapshot-controller-58dbcc7b99-t8bsp" [888cb879-39ef-4b8e-ae14-dca7f4cab0bb] Running
I0201 09:10:33.183988 961265 system_pods.go:89] "storage-provisioner" [0153e690-ed67-497c-ad47-0666208720b1] Running
I0201 09:10:33.183995 961265 system_pods.go:89] "tiller-deploy-7b677967b9-q2zb9" [3d55d479-092d-4cdb-9344-110276a11056] Running
I0201 09:10:33.184011 961265 system_pods.go:126] duration metric: took 7.762385ms to wait for k8s-apps to be running ...
I0201 09:10:33.184024 961265 system_svc.go:44] waiting for kubelet service to be running ....
I0201 09:10:33.184084 961265 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
I0201 09:10:33.195481 961265 system_svc.go:56] duration metric: took 11.449535ms (WaitForService) to wait for kubelet.
I0201 09:10:33.195511 961265 kubeadm.go:581] duration metric: took 50.543977334s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] ...
I0201 09:10:33.195543 961265 node_conditions.go:102] verifying NodePressure condition ...
I0201 09:10:33.198576 961265 node_conditions.go:122] node storage ephemeral capacity is 304681132Ki
I0201 09:10:33.198609 961265 node_conditions.go:123] node cpu capacity is 8
I0201 09:10:33.198623 961265 node_conditions.go:105] duration metric: took 3.074639ms to run NodePressure ...
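The node_conditions.go lines above read the node's reported capacity (ephemeral storage, CPU) and verify that no pressure condition is set on it. A rough client-go sketch of that kind of check is shown below; the package and function names are assumptions for illustration, not minikube's code.

package nodecheck

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// verifyNodePressure prints each node's CPU and ephemeral-storage capacity and
// fails if any memory, disk, or PID pressure condition is currently True.
func verifyNodePressure(ctx context.Context, cs kubernetes.Interface) error {
	nodes, err := cs.CoreV1().Nodes().List(ctx, metav1.ListOptions{})
	if err != nil {
		return err
	}
	pressure := map[corev1.NodeConditionType]bool{
		corev1.NodeMemoryPressure: true,
		corev1.NodeDiskPressure:   true,
		corev1.NodePIDPressure:    true,
	}
	for _, n := range nodes.Items {
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		storage := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		fmt.Printf("node %s: cpu capacity %s, ephemeral storage %s\n", n.Name, cpu.String(), storage.String())
		for _, c := range n.Status.Conditions {
			if pressure[c.Type] && c.Status == corev1.ConditionTrue {
				return fmt.Errorf("node %s reports %s", n.Name, c.Type)
			}
		}
	}
	return nil
}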
I0201 09:10:33.198639 961265 start.go:228] waiting for startup goroutines ...
I0201 09:10:33.570667 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:33.643263 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:33.672946 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:34.070150 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:34.143765 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:34.173229 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:34.570089 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:34.643485 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:34.673247 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:35.070841 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:35.143302 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:35.172938 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:35.570758 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:35.643935 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:35.674113 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:36.071215 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:36.144595 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:36.174016 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:36.570259 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:36.643524 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:36.673564 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:37.069759 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:37.143648 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:37.173848 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:37.570114 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:37.643869 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:37.675432 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:38.070268 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:38.143826 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:38.173645 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:38.570288 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:38.643767 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:38.673255 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:39.070044 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:39.143972 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:39.173980 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:39.570587 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:39.643811 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:39.673555 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:40.069982 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:40.143791 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:40.173925 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:40.569918 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:40.643635 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:40.673005 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:41.070437 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:41.143299 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:41.174359 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:41.570847 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:41.643622 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:41.674802 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:42.070608 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:42.143782 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:42.173459 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:42.571297 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:42.644209 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:42.673539 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:43.070739 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:43.143287 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:43.174920 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:43.570204 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:43.643605 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:43.673256 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:44.072704 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:44.142795 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:44.173411 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:44.570894 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:44.643433 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:44.673731 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:45.069555 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:45.143616 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:45.175263 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:45.570783 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:45.643782 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:45.673322 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:46.071090 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:46.143483 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:46.173236 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:46.570480 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:46.642905 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:46.672443 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:47.070134 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:47.144332 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:47.173128 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:47.570287 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:47.643758 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:47.739119 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:48.142669 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:48.145856 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:48.238593 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:48.642019 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:48.645562 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:48.734962 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:49.143850 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:49.154272 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:49.239630 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:49.634366 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:49.644994 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:49.737020 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:50.071273 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:50.144261 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:50.173458 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:50.570104 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:50.644574 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:50.674123 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:51.070116 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:51.143792 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:51.174046 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:51.570319 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:51.643362 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:51.673279 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:52.070945 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:52.144625 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:52.173782 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:52.570237 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:52.644681 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:52.674073 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:53.070500 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:53.143218 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:53.173396 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:53.570548 961265 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
I0201 09:10:53.643050 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:53.672606 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:54.070845 961265 kapi.go:107] duration metric: took 1m5.012922053s to wait for app.kubernetes.io/name=ingress-nginx ...
I0201 09:10:54.143308 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:54.173477 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:54.643228 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:54.672931 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:55.144108 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:55.173366 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:55.643810 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:55.676846 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:56.144185 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:56.173206 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:56.644110 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:56.673190 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:57.143750 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:57.173864 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:57.644226 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:57.673932 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:58.143621 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:58.173579 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:58.643715 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:58.673511 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:59.143404 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:59.173107 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:10:59.643079 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:10:59.673064 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:00.143318 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:11:00.172971 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:00.644066 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:11:00.672604 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:01.144012 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:11:01.173271 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:01.643349 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
I0201 09:11:01.673604 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:02.143783 961265 kapi.go:107] duration metric: took 1m9.004337806s to wait for kubernetes.io/minikube-addons=gcp-auth ...
I0201 09:11:02.145556 961265 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-642352 cluster.
I0201 09:11:02.147293 961265 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
I0201 09:11:02.148601 961265 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
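The gcp-auth messages above describe opting a pod out of credential mounting by labelling it with the gcp-auth-skip-secret key. Below is a minimal sketch of a pod carrying that label, built with the Kubernetes Go types; only the label key and the hello-app image come from this log, while the pod name, namespace, and the label value "true" are illustrative assumptions.

package gcpauthexample

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// skipGCPAuthPod returns a pod labelled so that the gcp-auth webhook leaves
// its credentials unmounted. Everything except the label key is a placeholder.
func skipGCPAuthPod() *corev1.Pod {
	return &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "example-no-gcp-auth",
			Namespace: "default",
			Labels: map[string]string{
				"gcp-auth-skip-secret": "true",
			},
		},
		Spec: corev1.PodSpec{
			Containers: []corev1.Container{
				{Name: "app", Image: "gcr.io/google-samples/hello-app:1.0"},
			},
		},
	}
}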
I0201 09:11:02.174098 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:02.672628 961265 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
I0201 09:11:03.174514 961265 kapi.go:107] duration metric: took 1m12.009813779s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
I0201 09:11:03.188541 961265 out.go:177] * Enabled addons: storage-provisioner, nvidia-device-plugin, ingress-dns, cloud-spanner, metrics-server, helm-tiller, inspektor-gadget, yakd, storage-provisioner-rancher, volumesnapshots, registry, ingress, gcp-auth, csi-hostpath-driver
I0201 09:11:03.193154 961265 addons.go:505] enable addons completed in 1m21.074780834s: enabled=[storage-provisioner nvidia-device-plugin ingress-dns cloud-spanner metrics-server helm-tiller inspektor-gadget yakd storage-provisioner-rancher volumesnapshots registry ingress gcp-auth csi-hostpath-driver]
I0201 09:11:03.193234 961265 start.go:233] waiting for cluster config update ...
I0201 09:11:03.193263 961265 start.go:242] writing updated cluster config ...
I0201 09:11:03.193553 961265 ssh_runner.go:195] Run: rm -f paused
I0201 09:11:03.252732 961265 start.go:600] kubectl: 1.29.1, cluster: 1.28.4 (minor skew: 1)
I0201 09:11:03.308395 961265 out.go:177] * Done! kubectl is now configured to use "addons-642352" cluster and "default" namespace by default
==> CRI-O <==
Feb 01 09:13:36 addons-642352 crio[948]: time="2024-02-01 09:13:36.386191787Z" level=info msg="Removing container: a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f" id=1f7dcfa1-2bd2-4e4e-b746-a1c0c7ffbe7d name=/runtime.v1.RuntimeService/RemoveContainer
Feb 01 09:13:36 addons-642352 crio[948]: time="2024-02-01 09:13:36.400113163Z" level=info msg="Removed container a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f: kube-system/kube-ingress-dns-minikube/minikube-ingress-dns" id=1f7dcfa1-2bd2-4e4e-b746-a1c0c7ffbe7d name=/runtime.v1.RuntimeService/RemoveContainer
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.867129577Z" level=info msg="Pulled image: gcr.io/google-samples/hello-app@sha256:b1455e1c4fcc5ea1023c9e3b584cd84b64eb920e332feff690a2829696e379e7" id=c13ecd9d-7d85-4d68-9179-e1a5c821efa6 name=/runtime.v1.ImageService/PullImage
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.868174777Z" level=info msg="Checking image status: gcr.io/google-samples/hello-app:1.0" id=f97447e9-0c88-473f-8772-51c6bf194f2a name=/runtime.v1.ImageService/ImageStatus
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.869152573Z" level=info msg="Image status: &ImageStatusResponse{Image:&Image{Id:dd1b12fcb60978ac32686ef6732d56f612c8636ef86693c09613946a54c69d79,RepoTags:[gcr.io/google-samples/hello-app:1.0],RepoDigests:[gcr.io/google-samples/hello-app@sha256:b1455e1c4fcc5ea1023c9e3b584cd84b64eb920e332feff690a2829696e379e7],Size_:28999827,Uid:nil,Username:nonroot,Spec:nil,},Info:map[string]string{},}" id=f97447e9-0c88-473f-8772-51c6bf194f2a name=/runtime.v1.ImageService/ImageStatus
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.870063099Z" level=info msg="Creating container: default/hello-world-app-5d77478584-gtfbn/hello-world-app" id=501cdf47-a252-483a-a20e-a10a4c35a2b1 name=/runtime.v1.RuntimeService/CreateContainer
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.870180633Z" level=warning msg="Allowed annotations are specified for workload []"
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.923192699Z" level=info msg="Created container 347b77d2a14a6979a39f9564ffad433f76db36ec57884c497a820308a3213516: default/hello-world-app-5d77478584-gtfbn/hello-world-app" id=501cdf47-a252-483a-a20e-a10a4c35a2b1 name=/runtime.v1.RuntimeService/CreateContainer
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.923873081Z" level=info msg="Starting container: 347b77d2a14a6979a39f9564ffad433f76db36ec57884c497a820308a3213516" id=cbb20a3d-f248-499d-8261-e0649f00af21 name=/runtime.v1.RuntimeService/StartContainer
Feb 01 09:13:37 addons-642352 crio[948]: time="2024-02-01 09:13:37.930866989Z" level=info msg="Started container" PID=9937 containerID=347b77d2a14a6979a39f9564ffad433f76db36ec57884c497a820308a3213516 description=default/hello-world-app-5d77478584-gtfbn/hello-world-app id=cbb20a3d-f248-499d-8261-e0649f00af21 name=/runtime.v1.RuntimeService/StartContainer sandboxID=4d9aebf8b2e08c5572124f0f53f61f2a4e6927e5b9bbd2a1c1a33c9004617ab9
Feb 01 09:13:38 addons-642352 crio[948]: time="2024-02-01 09:13:38.291128854Z" level=info msg="Stopping container: a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0 (timeout: 2s)" id=95defedc-3c6e-47c9-a48e-33c895b5fcf5 name=/runtime.v1.RuntimeService/StopContainer
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.298253084Z" level=warning msg="Stopping container a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0 with stop signal timed out: timeout reached after 2 seconds waiting for container process to exit" id=95defedc-3c6e-47c9-a48e-33c895b5fcf5 name=/runtime.v1.RuntimeService/StopContainer
Feb 01 09:13:40 addons-642352 conmon[5623]: conmon a54df7cee08c7b932964 <ninfo>: container 5635 exited with status 137
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.430758282Z" level=info msg="Stopped container a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0: ingress-nginx/ingress-nginx-controller-69cff4fd79-wjblf/controller" id=95defedc-3c6e-47c9-a48e-33c895b5fcf5 name=/runtime.v1.RuntimeService/StopContainer
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.431386788Z" level=info msg="Stopping pod sandbox: b1649949adeaff9255205a7be6424dba8cd4e188a073ca72a72faae82b83b00f" id=b7a240d9-741d-400a-a790-7ef7f9298258 name=/runtime.v1.RuntimeService/StopPodSandbox
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.434524516Z" level=info msg="Restoring iptables rules: *nat\n:KUBE-HP-C3HLGNYLL3XREXHI - [0:0]\n:KUBE-HP-MT2XKA3F5EUNWKL2 - [0:0]\n:KUBE-HOSTPORTS - [0:0]\n-X KUBE-HP-C3HLGNYLL3XREXHI\n-X KUBE-HP-MT2XKA3F5EUNWKL2\nCOMMIT\n"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.435953658Z" level=info msg="Closing host port tcp:80"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.436000796Z" level=info msg="Closing host port tcp:443"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.437491331Z" level=info msg="Host port tcp:80 does not have an open socket"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.437513882Z" level=info msg="Host port tcp:443 does not have an open socket"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.437658270Z" level=info msg="Got pod network &{Name:ingress-nginx-controller-69cff4fd79-wjblf Namespace:ingress-nginx ID:b1649949adeaff9255205a7be6424dba8cd4e188a073ca72a72faae82b83b00f UID:cc254d8c-527a-49b3-8571-5f674630e01b NetNS:/var/run/netns/e092536f-f4ec-44b0-8531-7906092d23b5 Networks:[{Name:kindnet Ifname:eth0}] RuntimeConfig:map[kindnet:{IP: MAC: PortMappings:[] Bandwidth:<nil> IpRanges:[]}] Aliases:map[]}"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.437773908Z" level=info msg="Deleting pod ingress-nginx_ingress-nginx-controller-69cff4fd79-wjblf from CNI network \"kindnet\" (type=ptp)"
Feb 01 09:13:40 addons-642352 crio[948]: time="2024-02-01 09:13:40.464137761Z" level=info msg="Stopped pod sandbox: b1649949adeaff9255205a7be6424dba8cd4e188a073ca72a72faae82b83b00f" id=b7a240d9-741d-400a-a790-7ef7f9298258 name=/runtime.v1.RuntimeService/StopPodSandbox
Feb 01 09:13:41 addons-642352 crio[948]: time="2024-02-01 09:13:41.402445581Z" level=info msg="Removing container: a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0" id=d0eca928-e229-4e43-ba13-7457b97a2fbb name=/runtime.v1.RuntimeService/RemoveContainer
Feb 01 09:13:41 addons-642352 crio[948]: time="2024-02-01 09:13:41.416178699Z" level=info msg="Removed container a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0: ingress-nginx/ingress-nginx-controller-69cff4fd79-wjblf/controller" id=d0eca928-e229-4e43-ba13-7457b97a2fbb name=/runtime.v1.RuntimeService/RemoveContainer
==> container status <==
CONTAINER IMAGE CREATED STATE NAME ATTEMPT POD ID POD
347b77d2a14a6 gcr.io/google-samples/hello-app@sha256:b1455e1c4fcc5ea1023c9e3b584cd84b64eb920e332feff690a2829696e379e7 7 seconds ago Running hello-world-app 0 4d9aebf8b2e08 hello-world-app-5d77478584-gtfbn
b5de33697084d ghcr.io/headlamp-k8s/headlamp@sha256:3c6da859a989f285b2fd2ac2f4763d1884d54a51e4405301e5324e0b2b70bd67 2 minutes ago Running headlamp 0 a7284d8df0deb headlamp-7ddfbb94ff-r8fgz
46c0a8ea0fcda docker.io/library/nginx@sha256:156d75f07c59b2fd59d3d1470631777943bb574135214f0a90c7bb82bde916da 2 minutes ago Running nginx 0 a672d31333168 nginx
86d3f36b801a3 gcr.io/k8s-minikube/gcp-auth-webhook@sha256:3e92b3d1c15220ae0f2f3505fb3a88899a1e48ec85fb777a1a4945ae9db2ce06 2 minutes ago Running gcp-auth 0 568ec81f16e6e gcp-auth-d4c87556c-spt5m
d626686bd4bea registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:29318c6957228dc10feb67fed5b91bdd8a9e3279e5b29c5965b9bd31a01ee385 3 minutes ago Exited patch 0 b0a2702e24675 ingress-nginx-admission-patch-qjn58
3aca81a96d9b6 docker.io/marcnuri/yakd@sha256:a3f540278e4c11373e15605311851dd9c64d208f4d63e727bccc0e39f9329310 3 minutes ago Running yakd 0 1dd7012db7778 yakd-dashboard-9947fc6bf-sh5hc
656023fe3762d registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:29318c6957228dc10feb67fed5b91bdd8a9e3279e5b29c5965b9bd31a01ee385 3 minutes ago Exited create 0 c5becef116afe ingress-nginx-admission-create-tkjz7
cb13d265fb782 ead0a4a53df89fd173874b46093b6e62d8c72967bbf606d672c9e8c9b601a4fc 3 minutes ago Running coredns 0 296fc67044096 coredns-5dd5756b68-97z46
dba6c9646ec96 6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562 3 minutes ago Running storage-provisioner 0 6e0ad141a0a31 storage-provisioner
6a9b051d431b7 83f6cc407eed88d214aad97f3539bde5c8e485ff14424cd021a3a2899304398e 4 minutes ago Running kube-proxy 0 ad647137b93da kube-proxy-gzzdh
eb07b25a2d8b0 c7d1297425461d3e24fe0ba658818593be65d13a2dd45a4c02d8768d6c8c18cc 4 minutes ago Running kindnet-cni 0 da184db6edba4 kindnet-tmjnr
7949a0c4d0897 d058aa5ab969ce7b84d25e7188be1f80633b18db8ea7d02d9d0a78e676236591 4 minutes ago Running kube-controller-manager 0 f34a6b9ff244c kube-controller-manager-addons-642352
4b243027fc21a 7fe0e6f37db33464725e616a12ccc4e36970370005a2b09683a974db6350c257 4 minutes ago Running kube-apiserver 0 34e3ea3b971e1 kube-apiserver-addons-642352
beec74884138d 73deb9a3f702532592a4167455f8bf2e5f5d900bcc959ba2fd2d35c321de1af9 4 minutes ago Running etcd 0 0aa72274b02fa etcd-addons-642352
53d39c795f697 e3db313c6dbc065d4ac3b32c7a6f2a878949031b881d217b63881a109c5cfba1 4 minutes ago Running kube-scheduler 0 57761897b1c5d kube-scheduler-addons-642352
==> coredns [cb13d265fb7823c84d4e8283453786725591f489ac76c41ce95425908fe2b609] <==
[INFO] 10.244.0.5:41901 - 3106 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000088023s
[INFO] 10.244.0.5:54830 - 51546 "A IN registry.kube-system.svc.cluster.local.europe-west1-b.c.k8s-minikube.internal. udp 95 false 512" NXDOMAIN qr,rd,ra 95 0.017639334s
[INFO] 10.244.0.5:54830 - 16984 "AAAA IN registry.kube-system.svc.cluster.local.europe-west1-b.c.k8s-minikube.internal. udp 95 false 512" NXDOMAIN qr,rd,ra 95 0.0178536s
[INFO] 10.244.0.5:33409 - 40436 "AAAA IN registry.kube-system.svc.cluster.local.c.k8s-minikube.internal. udp 80 false 512" NXDOMAIN qr,rd,ra 80 0.00405879s
[INFO] 10.244.0.5:33409 - 6640 "A IN registry.kube-system.svc.cluster.local.c.k8s-minikube.internal. udp 80 false 512" NXDOMAIN qr,rd,ra 80 0.006100739s
[INFO] 10.244.0.5:45619 - 46557 "A IN registry.kube-system.svc.cluster.local.google.internal. udp 72 false 512" NXDOMAIN qr,rd,ra 72 0.004198021s
[INFO] 10.244.0.5:45619 - 56785 "AAAA IN registry.kube-system.svc.cluster.local.google.internal. udp 72 false 512" NXDOMAIN qr,rd,ra 72 0.004575804s
[INFO] 10.244.0.5:34675 - 55844 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000090428s
[INFO] 10.244.0.5:34675 - 15656 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000139578s
[INFO] 10.244.0.21:51216 - 49778 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000240687s
[INFO] 10.244.0.21:49044 - 39579 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000346888s
[INFO] 10.244.0.21:34464 - 15548 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000130687s
[INFO] 10.244.0.21:45942 - 8445 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000162545s
[INFO] 10.244.0.21:43096 - 53856 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000123833s
[INFO] 10.244.0.21:44422 - 22683 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.0001249s
[INFO] 10.244.0.21:43059 - 34072 "AAAA IN storage.googleapis.com.europe-west1-b.c.k8s-minikube.internal. udp 90 false 1232" NXDOMAIN qr,rd,ra 79 0.007032667s
[INFO] 10.244.0.21:34827 - 45320 "A IN storage.googleapis.com.europe-west1-b.c.k8s-minikube.internal. udp 90 false 1232" NXDOMAIN qr,rd,ra 79 0.007427969s
[INFO] 10.244.0.21:60004 - 21183 "A IN storage.googleapis.com.c.k8s-minikube.internal. udp 75 false 1232" NXDOMAIN qr,rd,ra 64 0.006951376s
[INFO] 10.244.0.21:37730 - 60854 "AAAA IN storage.googleapis.com.c.k8s-minikube.internal. udp 75 false 1232" NXDOMAIN qr,rd,ra 64 0.007389154s
[INFO] 10.244.0.21:60905 - 31477 "A IN storage.googleapis.com.google.internal. udp 67 false 1232" NXDOMAIN qr,rd,ra 56 0.005598274s
[INFO] 10.244.0.21:48479 - 13762 "AAAA IN storage.googleapis.com.google.internal. udp 67 false 1232" NXDOMAIN qr,rd,ra 56 0.005956682s
[INFO] 10.244.0.21:49662 - 9655 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 116 0.000765978s
[INFO] 10.244.0.21:60627 - 43484 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 140 0.000764124s
[INFO] 10.244.0.23:34852 - 2 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000178908s
[INFO] 10.244.0.23:47272 - 3 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000147122s
==> describe nodes <==
Name: addons-642352
Roles: control-plane
Labels: beta.kubernetes.io/arch=amd64
beta.kubernetes.io/os=linux
kubernetes.io/arch=amd64
kubernetes.io/hostname=addons-642352
kubernetes.io/os=linux
minikube.k8s.io/commit=de6311e496aefb62bd53fcfd0fb6b150999d9424
minikube.k8s.io/name=addons-642352
minikube.k8s.io/primary=true
minikube.k8s.io/updated_at=2024_02_01T09_09_29_0700
minikube.k8s.io/version=v1.32.0
node-role.kubernetes.io/control-plane=
node.kubernetes.io/exclude-from-external-load-balancers=
topology.hostpath.csi/node=addons-642352
Annotations: kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/crio/crio.sock
node.alpha.kubernetes.io/ttl: 0
volumes.kubernetes.io/controller-managed-attach-detach: true
CreationTimestamp: Thu, 01 Feb 2024 09:09:26 +0000
Taints: <none>
Unschedulable: false
Lease:
HolderIdentity: addons-642352
AcquireTime: <unset>
RenewTime: Thu, 01 Feb 2024 09:13:43 +0000
Conditions:
Type Status LastHeartbeatTime LastTransitionTime Reason Message
---- ------ ----------------- ------------------ ------ -------
MemoryPressure False Thu, 01 Feb 2024 09:12:32 +0000 Thu, 01 Feb 2024 09:09:24 +0000 KubeletHasSufficientMemory kubelet has sufficient memory available
DiskPressure False Thu, 01 Feb 2024 09:12:32 +0000 Thu, 01 Feb 2024 09:09:24 +0000 KubeletHasNoDiskPressure kubelet has no disk pressure
PIDPressure False Thu, 01 Feb 2024 09:12:32 +0000 Thu, 01 Feb 2024 09:09:24 +0000 KubeletHasSufficientPID kubelet has sufficient PID available
Ready True Thu, 01 Feb 2024 09:12:32 +0000 Thu, 01 Feb 2024 09:09:50 +0000 KubeletReady kubelet is posting ready status
Addresses:
InternalIP: 192.168.49.2
Hostname: addons-642352
Capacity:
cpu: 8
ephemeral-storage: 304681132Ki
hugepages-1Gi: 0
hugepages-2Mi: 0
memory: 32859424Ki
pods: 110
Allocatable:
cpu: 8
ephemeral-storage: 304681132Ki
hugepages-1Gi: 0
hugepages-2Mi: 0
memory: 32859424Ki
pods: 110
System Info:
Machine ID: d1f9f735de744badb7d7784b0c83c999
System UUID: 8c0c6ac3-e918-4ce9-aba0-88ccb0d38e3a
Boot ID: 2cfa37ec-936f-4f6f-8415-4c1cf32697e8
Kernel Version: 5.15.0-1049-gcp
OS Image: Ubuntu 22.04.3 LTS
Operating System: linux
Architecture: amd64
Container Runtime Version: cri-o://1.24.6
Kubelet Version: v1.28.4
Kube-Proxy Version: v1.28.4
PodCIDR: 10.244.0.0/24
PodCIDRs: 10.244.0.0/24
Non-terminated Pods: (13 in total)
Namespace Name CPU Requests CPU Limits Memory Requests Memory Limits Age
--------- ---- ------------ ---------- --------------- ------------- ---
default hello-world-app-5d77478584-gtfbn 0 (0%) 0 (0%) 0 (0%) 0 (0%) 10s
default nginx 0 (0%) 0 (0%) 0 (0%) 0 (0%) 2m32s
gcp-auth gcp-auth-d4c87556c-spt5m 0 (0%) 0 (0%) 0 (0%) 0 (0%) 3m53s
headlamp headlamp-7ddfbb94ff-r8fgz 0 (0%) 0 (0%) 0 (0%) 0 (0%) 2m13s
kube-system coredns-5dd5756b68-97z46 100m (1%) 0 (0%) 70Mi (0%) 170Mi (0%) 4m3s
kube-system etcd-addons-642352 100m (1%) 0 (0%) 100Mi (0%) 0 (0%) 4m16s
kube-system kindnet-tmjnr 100m (1%) 100m (1%) 50Mi (0%) 50Mi (0%) 4m4s
kube-system kube-apiserver-addons-642352 250m (3%) 0 (0%) 0 (0%) 0 (0%) 4m16s
kube-system kube-controller-manager-addons-642352 200m (2%) 0 (0%) 0 (0%) 0 (0%) 4m18s
kube-system kube-proxy-gzzdh 0 (0%) 0 (0%) 0 (0%) 0 (0%) 4m4s
kube-system kube-scheduler-addons-642352 100m (1%) 0 (0%) 0 (0%) 0 (0%) 4m16s
kube-system storage-provisioner 0 (0%) 0 (0%) 0 (0%) 0 (0%) 3m58s
yakd-dashboard yakd-dashboard-9947fc6bf-sh5hc 0 (0%) 0 (0%) 128Mi (0%) 256Mi (0%) 3m57s
Allocated resources:
(Total limits may be over 100 percent, i.e., overcommitted.)
Resource Requests Limits
-------- -------- ------
cpu 850m (10%) 100m (1%)
memory 348Mi (1%) 476Mi (1%)
ephemeral-storage 0 (0%) 0 (0%)
hugepages-1Gi 0 (0%) 0 (0%)
hugepages-2Mi 0 (0%) 0 (0%)
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal Starting 3m58s kube-proxy
Normal Starting 4m17s kubelet Starting kubelet.
Normal NodeHasSufficientMemory 4m16s kubelet Node addons-642352 status is now: NodeHasSufficientMemory
Normal NodeHasNoDiskPressure 4m16s kubelet Node addons-642352 status is now: NodeHasNoDiskPressure
Normal NodeHasSufficientPID 4m16s kubelet Node addons-642352 status is now: NodeHasSufficientPID
Normal RegisteredNode 4m4s node-controller Node addons-642352 event: Registered Node addons-642352 in Controller
Normal NodeReady 3m55s kubelet Node addons-642352 status is now: NodeReady
==> dmesg <==
[ +0.000009] ll header: 00000000: 02 42 61 50 e0 51 02 42 c0 a8 5e 02 08 00
[ +6.651458] IPv4: martian source 10.244.0.2 from 10.96.0.1, on dev br-c4cb2b33b568
[ +0.000006] ll header: 00000000: 02 42 61 50 e0 51 02 42 c0 a8 5e 02 08 00
[ +4.867689] IPv4: martian source 10.244.0.3 from 10.96.0.1, on dev br-f771dc58cf4e
[ +0.000006] ll header: 00000000: 02 42 7d 83 19 b2 02 42 c0 a8 4c 02 08 00
[ +8.443392] IPv4: martian source 10.244.0.2 from 10.96.0.1, on dev br-c4cb2b33b568
[ +0.000007] ll header: 00000000: 02 42 61 50 e0 51 02 42 c0 a8 5e 02 08 00
[ +3.839713] IPv4: martian source 10.244.0.3 from 10.96.0.1, on dev br-31cd09de568c
[ +0.000006] ll header: 00000000: 02 42 c7 d7 50 77 02 42 c0 a8 55 02 08 00
[ +0.000026] IPv4: martian source 10.244.0.5 from 10.96.0.1, on dev br-31cd09de568c
[ +0.000005] ll header: 00000000: 02 42 c7 d7 50 77 02 42 c0 a8 55 02 08 00
[Feb 1 09:11] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000006] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[ +1.015423] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000006] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[ +2.011906] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000031] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[ +4.227598] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000006] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[ +8.187454] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000006] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[ +16.126900] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000006] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
[Feb 1 09:12] IPv4: martian source 10.244.0.20 from 127.0.0.1, on dev eth0
[ +0.000008] ll header: 00000000: 8a 0e 69 7c fe 07 12 e6 eb b8 95 32 08 00
==> etcd [beec74884138db779f07ac3c265d22c7f518d12c984ba722d66f0a9269a83455] <==
{"level":"warn","ts":"2024-02-01T09:09:45.238551Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"190.321487ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/pods/kube-system/kindnet-tmjnr\" ","response":"range_response_count:1 size:4698"}
{"level":"info","ts":"2024-02-01T09:09:45.243734Z","caller":"traceutil/trace.go:171","msg":"trace[525081933] range","detail":"{range_begin:/registry/pods/kube-system/kindnet-tmjnr; range_end:; response_count:1; response_revision:385; }","duration":"195.512587ms","start":"2024-02-01T09:09:45.048203Z","end":"2024-02-01T09:09:45.243716Z","steps":["trace[525081933] 'agreement among raft nodes before linearized reading' (duration: 186.429427ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:45.244208Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"203.480173ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2024-02-01T09:09:45.244303Z","caller":"traceutil/trace.go:171","msg":"trace[1124320874] range","detail":"{range_begin:/registry/health; range_end:; response_count:0; response_revision:385; }","duration":"213.184579ms","start":"2024-02-01T09:09:45.031106Z","end":"2024-02-01T09:09:45.244291Z","steps":["trace[1124320874] 'agreement among raft nodes before linearized reading' (duration: 203.448507ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:45.834598Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"103.136233ms","expected-duration":"100ms","prefix":"","request":"header:<ID:8128026887832517508 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/pods/kube-system/kindnet-tmjnr\" mod_revision:331 > success:<request_put:<key:\"/registry/pods/kube-system/kindnet-tmjnr\" value_size:4622 >> failure:<request_range:<key:\"/registry/pods/kube-system/kindnet-tmjnr\" > >>","response":"size:16"}
{"level":"info","ts":"2024-02-01T09:09:45.835524Z","caller":"traceutil/trace.go:171","msg":"trace[1648602944] transaction","detail":"{read_only:false; response_revision:391; number_of_response:1; }","duration":"190.210701ms","start":"2024-02-01T09:09:45.645298Z","end":"2024-02-01T09:09:45.835509Z","steps":["trace[1648602944] 'process raft request' (duration: 190.086808ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:09:45.83589Z","caller":"traceutil/trace.go:171","msg":"trace[579062441] transaction","detail":"{read_only:false; response_revision:390; number_of_response:1; }","duration":"192.780308ms","start":"2024-02-01T09:09:45.643094Z","end":"2024-02-01T09:09:45.835874Z","steps":["trace[579062441] 'process raft request' (duration: 87.568243ms)","trace[579062441] 'compare' (duration: 102.802878ms)"],"step_count":2}
{"level":"info","ts":"2024-02-01T09:09:46.142634Z","caller":"traceutil/trace.go:171","msg":"trace[1529098278] transaction","detail":"{read_only:false; response_revision:399; number_of_response:1; }","duration":"101.320222ms","start":"2024-02-01T09:09:46.041299Z","end":"2024-02-01T09:09:46.142619Z","steps":[],"step_count":0}
{"level":"info","ts":"2024-02-01T09:09:46.143017Z","caller":"traceutil/trace.go:171","msg":"trace[2078178562] transaction","detail":"{read_only:false; response_revision:400; number_of_response:1; }","duration":"101.403853ms","start":"2024-02-01T09:09:46.04159Z","end":"2024-02-01T09:09:46.142993Z","steps":["trace[2078178562] 'process raft request' (duration: 100.957318ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:09:46.543063Z","caller":"traceutil/trace.go:171","msg":"trace[353493847] transaction","detail":"{read_only:false; response_revision:404; number_of_response:1; }","duration":"102.143208ms","start":"2024-02-01T09:09:46.440901Z","end":"2024-02-01T09:09:46.543044Z","steps":["trace[353493847] 'process raft request' (duration: 97.105134ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:09:46.543216Z","caller":"traceutil/trace.go:171","msg":"trace[581074164] linearizableReadLoop","detail":"{readStateIndex:417; appliedIndex:416; }","duration":"100.509541ms","start":"2024-02-01T09:09:46.442698Z","end":"2024-02-01T09:09:46.543207Z","steps":["trace[581074164] 'read index received' (duration: 95.742696ms)","trace[581074164] 'applied index is now lower than readState.Index' (duration: 4.76603ms)"],"step_count":2}
{"level":"warn","ts":"2024-02-01T09:09:46.543291Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"100.602564ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/clusterrolebindings/storage-provisioner\" ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2024-02-01T09:09:46.543319Z","caller":"traceutil/trace.go:171","msg":"trace[411364866] range","detail":"{range_begin:/registry/clusterrolebindings/storage-provisioner; range_end:; response_count:0; response_revision:406; }","duration":"100.645961ms","start":"2024-02-01T09:09:46.442666Z","end":"2024-02-01T09:09:46.543312Z","steps":["trace[411364866] 'agreement among raft nodes before linearized reading' (duration: 100.581921ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:09:46.543578Z","caller":"traceutil/trace.go:171","msg":"trace[757647246] transaction","detail":"{read_only:false; response_revision:405; number_of_response:1; }","duration":"100.406921ms","start":"2024-02-01T09:09:46.443162Z","end":"2024-02-01T09:09:46.543569Z","steps":["trace[757647246] 'process raft request' (duration: 99.519539ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:46.750851Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"101.257618ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2024-02-01T09:09:46.830651Z","caller":"traceutil/trace.go:171","msg":"trace[572632252] range","detail":"{range_begin:/registry/health; range_end:; response_count:0; response_revision:416; }","duration":"181.049058ms","start":"2024-02-01T09:09:46.649563Z","end":"2024-02-01T09:09:46.830612Z","steps":["trace[572632252] 'agreement among raft nodes before linearized reading' (duration: 101.205179ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:46.843386Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"193.520199ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/deployments/default/cloud-spanner-emulator\" ","response":"range_response_count:1 size:3455"}
{"level":"info","ts":"2024-02-01T09:09:46.843573Z","caller":"traceutil/trace.go:171","msg":"trace[1059453143] range","detail":"{range_begin:/registry/deployments/default/cloud-spanner-emulator; range_end:; response_count:1; response_revision:419; }","duration":"193.718627ms","start":"2024-02-01T09:09:46.649837Z","end":"2024-02-01T09:09:46.843555Z","steps":["trace[1059453143] 'agreement among raft nodes before linearized reading' (duration: 193.461223ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:46.843816Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"194.130481ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/namespaces/kube-system\" ","response":"range_response_count:1 size:351"}
{"level":"info","ts":"2024-02-01T09:09:46.843916Z","caller":"traceutil/trace.go:171","msg":"trace[595456428] range","detail":"{range_begin:/registry/namespaces/kube-system; range_end:; response_count:1; response_revision:419; }","duration":"194.23581ms","start":"2024-02-01T09:09:46.64967Z","end":"2024-02-01T09:09:46.843906Z","steps":["trace[595456428] 'agreement among raft nodes before linearized reading' (duration: 194.085681ms)"],"step_count":1}
{"level":"warn","ts":"2024-02-01T09:09:46.844108Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"194.450586ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/clusterroles/minikube-ingress-dns\" ","response":"range_response_count:0 size:5"}
{"level":"info","ts":"2024-02-01T09:09:46.844173Z","caller":"traceutil/trace.go:171","msg":"trace[635167479] range","detail":"{range_begin:/registry/clusterroles/minikube-ingress-dns; range_end:; response_count:0; response_revision:419; }","duration":"194.517668ms","start":"2024-02-01T09:09:46.649648Z","end":"2024-02-01T09:09:46.844166Z","steps":["trace[635167479] 'agreement among raft nodes before linearized reading' (duration: 194.433127ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:11:09.145117Z","caller":"traceutil/trace.go:171","msg":"trace[1315432883] transaction","detail":"{read_only:false; response_revision:1207; number_of_response:1; }","duration":"200.237708ms","start":"2024-02-01T09:11:08.944847Z","end":"2024-02-01T09:11:09.145085Z","steps":["trace[1315432883] 'process raft request' (duration: 117.141702ms)","trace[1315432883] 'compare' (duration: 82.910917ms)"],"step_count":2}
{"level":"info","ts":"2024-02-01T09:11:25.754011Z","caller":"traceutil/trace.go:171","msg":"trace[881509867] transaction","detail":"{read_only:false; number_of_response:1; response_revision:1345; }","duration":"120.851077ms","start":"2024-02-01T09:11:25.633139Z","end":"2024-02-01T09:11:25.75399Z","steps":["trace[881509867] 'process raft request' (duration: 120.682673ms)"],"step_count":1}
{"level":"info","ts":"2024-02-01T09:11:25.796854Z","caller":"traceutil/trace.go:171","msg":"trace[1379402890] transaction","detail":"{read_only:false; response_revision:1346; number_of_response:1; }","duration":"163.536174ms","start":"2024-02-01T09:11:25.633298Z","end":"2024-02-01T09:11:25.796834Z","steps":["trace[1379402890] 'process raft request' (duration: 163.391077ms)"],"step_count":1}
==> gcp-auth [86d3f36b801a3a87d3e8da4a887460aa1d44646e2d0f6db2b98d7e3f48f43098] <==
2024/02/01 09:11:01 GCP Auth Webhook started!
2024/02/01 09:11:13 Ready to marshal response ...
2024/02/01 09:11:13 Ready to write response ...
2024/02/01 09:11:13 Ready to marshal response ...
2024/02/01 09:11:13 Ready to write response ...
2024/02/01 09:11:23 Ready to marshal response ...
2024/02/01 09:11:23 Ready to write response ...
2024/02/01 09:11:23 Ready to marshal response ...
2024/02/01 09:11:23 Ready to write response ...
2024/02/01 09:11:32 Ready to marshal response ...
2024/02/01 09:11:32 Ready to write response ...
2024/02/01 09:11:32 Ready to marshal response ...
2024/02/01 09:11:32 Ready to write response ...
2024/02/01 09:11:32 Ready to marshal response ...
2024/02/01 09:11:32 Ready to write response ...
2024/02/01 09:11:34 Ready to marshal response ...
2024/02/01 09:11:34 Ready to write response ...
2024/02/01 09:11:49 Ready to marshal response ...
2024/02/01 09:11:49 Ready to write response ...
2024/02/01 09:12:03 Ready to marshal response ...
2024/02/01 09:12:03 Ready to write response ...
2024/02/01 09:12:23 Ready to marshal response ...
2024/02/01 09:12:23 Ready to write response ...
2024/02/01 09:13:35 Ready to marshal response ...
2024/02/01 09:13:35 Ready to write response ...
==> kernel <==
09:13:45 up 15:56, 0 users, load average: 0.26, 0.77, 1.44
Linux addons-642352 5.15.0-1049-gcp #57~20.04.1-Ubuntu SMP Wed Jan 17 16:04:23 UTC 2024 x86_64 x86_64 x86_64 GNU/Linux
PRETTY_NAME="Ubuntu 22.04.3 LTS"
==> kindnet [eb07b25a2d8b02843d1d94db95fb299db42db259c6ad937a2d56a9fd3cebb0c0] <==
I0201 09:11:40.479291 1 main.go:227] handling current node
I0201 09:11:50.488447 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:11:50.488481 1 main.go:227] handling current node
I0201 09:12:00.496526 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:00.496555 1 main.go:227] handling current node
I0201 09:12:10.500977 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:10.501003 1 main.go:227] handling current node
I0201 09:12:20.513904 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:20.513929 1 main.go:227] handling current node
I0201 09:12:30.526771 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:30.526803 1 main.go:227] handling current node
I0201 09:12:40.531145 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:40.531172 1 main.go:227] handling current node
I0201 09:12:50.539277 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:12:50.539307 1 main.go:227] handling current node
I0201 09:13:00.544820 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:13:00.544849 1 main.go:227] handling current node
I0201 09:13:10.554046 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:13:10.554074 1 main.go:227] handling current node
I0201 09:13:20.565666 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:13:20.565692 1 main.go:227] handling current node
I0201 09:13:30.577774 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:13:30.577802 1 main.go:227] handling current node
I0201 09:13:40.589764 1 main.go:223] Handling node with IPs: map[192.168.49.2:{}]
I0201 09:13:40.589788 1 main.go:227] handling current node
==> kube-apiserver [4b243027fc21a6f0805393d17d2db16d653d062a55ddf49a14fa041da227041d] <==
I0201 09:11:26.183516 1 handler.go:232] Adding GroupVersion metrics.k8s.io v1beta1 to ResourceManager
I0201 09:11:32.530280 1 alloc.go:330] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.111.114.236"}
E0201 09:11:51.742333 1 authentication.go:73] "Unable to authenticate the request" err="[invalid bearer token, serviceaccounts \"local-path-provisioner-service-account\" not found]"
E0201 09:11:53.310330 1 upgradeaware.go:425] Error proxying data from client to backend: read tcp 192.168.49.2:8443->10.244.0.28:38090: read: connection reset by peer
I0201 09:12:17.797495 1 controller.go:624] quota admission added evaluator for: volumesnapshots.snapshot.storage.k8s.io
I0201 09:12:18.688696 1 controller.go:129] OpenAPI AggregationController: action for item v1beta1.metrics.k8s.io: Nothing (removed from the queue).
I0201 09:12:41.003413 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.003470 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.010492 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.010555 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.017719 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.017890 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.018797 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.018898 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.031286 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.031429 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.035644 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.035804 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.047615 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
I0201 09:12:41.049563 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1beta1 to ResourceManager
I0201 09:12:41.049595 1 handler.go:232] Adding GroupVersion snapshot.storage.k8s.io v1 to ResourceManager
W0201 09:12:42.019536 1 cacher.go:171] Terminating all watchers from cacher volumesnapshotclasses.snapshot.storage.k8s.io
W0201 09:12:42.048812 1 cacher.go:171] Terminating all watchers from cacher volumesnapshotcontents.snapshot.storage.k8s.io
W0201 09:12:42.058139 1 cacher.go:171] Terminating all watchers from cacher volumesnapshots.snapshot.storage.k8s.io
I0201 09:13:35.400490 1 alloc.go:330] "allocated clusterIPs" service="default/hello-world-app" clusterIPs={"IPv4":"10.108.86.134"}
==> kube-controller-manager [7949a0c4d089777eb1fa2c8a928c705268156eb66fe00b01925a2678bef573aa] <==
W0201 09:12:56.482301 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:12:56.482334 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:12:59.649693 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:12:59.649728 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:12:59.808059 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:12:59.808093 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:13:03.452125 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:13:03.452161 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:13:14.500201 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:13:14.500245 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:13:14.817854 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:13:14.817890 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0201 09:13:22.463227 1 reflector.go:535] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0201 09:13:22.463261 1 reflector.go:147] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0201 09:13:35.234619 1 event.go:307] "Event occurred" object="default/hello-world-app" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set hello-world-app-5d77478584 to 1"
I0201 09:13:35.245314 1 event.go:307] "Event occurred" object="default/hello-world-app-5d77478584" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: hello-world-app-5d77478584-gtfbn"
I0201 09:13:35.251068 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="16.758439ms"
I0201 09:13:35.256272 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="5.147387ms"
I0201 09:13:35.256399 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="50.864µs"
I0201 09:13:35.262304 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="69.617µs"
I0201 09:13:37.280350 1 job_controller.go:562] "enqueueing job" key="ingress-nginx/ingress-nginx-admission-create"
I0201 09:13:37.280429 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="ingress-nginx/ingress-nginx-controller-69cff4fd79" duration="7.582µs"
I0201 09:13:37.283427 1 job_controller.go:562] "enqueueing job" key="ingress-nginx/ingress-nginx-admission-patch"
I0201 09:13:38.408664 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="5.871619ms"
I0201 09:13:38.408759 1 replica_set.go:676] "Finished syncing" kind="ReplicaSet" key="default/hello-world-app-5d77478584" duration="42.269µs"
==> kube-proxy [6a9b051d431b740b081c01f29a7f09791df155928bb79bb10ee69b8155621160] <==
I0201 09:09:45.235753 1 server_others.go:69] "Using iptables proxy"
I0201 09:09:45.453494 1 node.go:141] Successfully retrieved node IP: 192.168.49.2
I0201 09:09:47.051827 1 server.go:632] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
I0201 09:09:47.237586 1 server_others.go:152] "Using iptables Proxier"
I0201 09:09:47.237665 1 server_others.go:421] "Detect-local-mode set to ClusterCIDR, but no cluster CIDR for family" ipFamily="IPv6"
I0201 09:09:47.237678 1 server_others.go:438] "Defaulting to no-op detect-local"
I0201 09:09:47.237720 1 proxier.go:251] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses"
I0201 09:09:47.238008 1 server.go:846] "Version info" version="v1.28.4"
I0201 09:09:47.238036 1 server.go:848] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I0201 09:09:47.239046 1 config.go:188] "Starting service config controller"
I0201 09:09:47.239132 1 shared_informer.go:311] Waiting for caches to sync for service config
I0201 09:09:47.239194 1 config.go:97] "Starting endpoint slice config controller"
I0201 09:09:47.239223 1 shared_informer.go:311] Waiting for caches to sync for endpoint slice config
I0201 09:09:47.239821 1 config.go:315] "Starting node config controller"
I0201 09:09:47.239877 1 shared_informer.go:311] Waiting for caches to sync for node config
I0201 09:09:47.344823 1 shared_informer.go:318] Caches are synced for node config
I0201 09:09:47.344860 1 shared_informer.go:318] Caches are synced for service config
I0201 09:09:47.344890 1 shared_informer.go:318] Caches are synced for endpoint slice config
==> kube-scheduler [53d39c795f697fdb588a98ca700e72d88126bf79f76af820ba43f2e885a71258] <==
W0201 09:09:26.357989 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
E0201 09:09:26.358006 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
W0201 09:09:26.358066 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
E0201 09:09:26.358081 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
W0201 09:09:26.358145 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
E0201 09:09:26.358170 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
W0201 09:09:26.358330 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
E0201 09:09:26.358355 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
W0201 09:09:27.271964 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
E0201 09:09:27.271995 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
W0201 09:09:27.281451 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
E0201 09:09:27.281490 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
W0201 09:09:27.312920 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
E0201 09:09:27.312960 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
W0201 09:09:27.324394 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
E0201 09:09:27.324431 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
W0201 09:09:27.339690 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
E0201 09:09:27.339726 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
W0201 09:09:27.396851 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
E0201 09:09:27.396885 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
W0201 09:09:27.435852 1 reflector.go:535] vendor/k8s.io/client-go/informers/factory.go:150: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
E0201 09:09:27.435890 1 reflector.go:147] vendor/k8s.io/client-go/informers/factory.go:150: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
W0201 09:09:27.611792 1 reflector.go:535] pkg/server/dynamiccertificates/configmap_cafile_content.go:206: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
E0201 09:09:27.611827 1 reflector.go:147] pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
I0201 09:09:29.450861 1 shared_informer.go:318] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
==> kubelet <==
Feb 01 09:13:35 addons-642352 kubelet[1551]: I0201 09:13:35.443032 1551 reconciler_common.go:258] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mh9w\" (UniqueName: \"kubernetes.io/projected/55e16530-1b4f-4351-99cc-a2cce79bbc11-kube-api-access-9mh9w\") pod \"hello-world-app-5d77478584-gtfbn\" (UID: \"55e16530-1b4f-4351-99cc-a2cce79bbc11\") " pod="default/hello-world-app-5d77478584-gtfbn"
Feb 01 09:13:35 addons-642352 kubelet[1551]: I0201 09:13:35.443109 1551 reconciler_common.go:258] "operationExecutor.VerifyControllerAttachedVolume started for volume \"gcp-creds\" (UniqueName: \"kubernetes.io/host-path/55e16530-1b4f-4351-99cc-a2cce79bbc11-gcp-creds\") pod \"hello-world-app-5d77478584-gtfbn\" (UID: \"55e16530-1b4f-4351-99cc-a2cce79bbc11\") " pod="default/hello-world-app-5d77478584-gtfbn"
Feb 01 09:13:35 addons-642352 kubelet[1551]: W0201 09:13:35.888208 1551 manager.go:1159] Failed to process watch event {EventType:0 Name:/docker/ba9aca09f642738d1e391d3fcd2462426a7803a0e2d60cc2f60823541ed64bf0/crio-4d9aebf8b2e08c5572124f0f53f61f2a4e6927e5b9bbd2a1c1a33c9004617ab9 WatchSource:0}: Error finding container 4d9aebf8b2e08c5572124f0f53f61f2a4e6927e5b9bbd2a1c1a33c9004617ab9: Status 404 returned error can't find the container with id 4d9aebf8b2e08c5572124f0f53f61f2a4e6927e5b9bbd2a1c1a33c9004617ab9
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.385028 1551 scope.go:117] "RemoveContainer" containerID="a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f"
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.400407 1551 scope.go:117] "RemoveContainer" containerID="a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f"
Feb 01 09:13:36 addons-642352 kubelet[1551]: E0201 09:13:36.400882 1551 remote_runtime.go:432] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f\": container with ID starting with a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f not found: ID does not exist" containerID="a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f"
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.400932 1551 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f"} err="failed to get container status \"a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f\": rpc error: code = NotFound desc = could not find container \"a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f\": container with ID starting with a31ea1a1a3ab2ccbc2c39f6a89f378d7925c22580589ee76e178d6cb94e47c7f not found: ID does not exist"
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.451992 1551 reconciler_common.go:172] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5qsc\" (UniqueName: \"kubernetes.io/projected/46dc9e1a-2137-442c-993a-921d6322672a-kube-api-access-g5qsc\") pod \"46dc9e1a-2137-442c-993a-921d6322672a\" (UID: \"46dc9e1a-2137-442c-993a-921d6322672a\") "
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.453920 1551 operation_generator.go:882] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46dc9e1a-2137-442c-993a-921d6322672a-kube-api-access-g5qsc" (OuterVolumeSpecName: "kube-api-access-g5qsc") pod "46dc9e1a-2137-442c-993a-921d6322672a" (UID: "46dc9e1a-2137-442c-993a-921d6322672a"). InnerVolumeSpecName "kube-api-access-g5qsc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 01 09:13:36 addons-642352 kubelet[1551]: I0201 09:13:36.552923 1551 reconciler_common.go:300] "Volume detached for volume \"kube-api-access-g5qsc\" (UniqueName: \"kubernetes.io/projected/46dc9e1a-2137-442c-993a-921d6322672a-kube-api-access-g5qsc\") on node \"addons-642352\" DevicePath \"\""
Feb 01 09:13:37 addons-642352 kubelet[1551]: I0201 09:13:37.054967 1551 kubelet_volumes.go:161] "Cleaned up orphaned pod volumes dir" podUID="46dc9e1a-2137-442c-993a-921d6322672a" path="/var/lib/kubelet/pods/46dc9e1a-2137-442c-993a-921d6322672a/volumes"
Feb 01 09:13:38 addons-642352 kubelet[1551]: I0201 09:13:38.403126 1551 pod_startup_latency_tracker.go:102] "Observed pod startup duration" pod="default/hello-world-app-5d77478584-gtfbn" podStartSLOduration=1.426987545 podCreationTimestamp="2024-02-01 09:13:35 +0000 UTC" firstStartedPulling="2024-02-01 09:13:35.891372315 +0000 UTC m=+246.969754427" lastFinishedPulling="2024-02-01 09:13:37.867464754 +0000 UTC m=+248.945846862" observedRunningTime="2024-02-01 09:13:38.402585571 +0000 UTC m=+249.480967685" watchObservedRunningTime="2024-02-01 09:13:38.40307998 +0000 UTC m=+249.481462092"
Feb 01 09:13:39 addons-642352 kubelet[1551]: I0201 09:13:39.054991 1551 kubelet_volumes.go:161] "Cleaned up orphaned pod volumes dir" podUID="a039fc16-f7c1-49e3-b072-9938e3b045bb" path="/var/lib/kubelet/pods/a039fc16-f7c1-49e3-b072-9938e3b045bb/volumes"
Feb 01 09:13:39 addons-642352 kubelet[1551]: I0201 09:13:39.055347 1551 kubelet_volumes.go:161] "Cleaned up orphaned pod volumes dir" podUID="d0fb6356-f017-476b-8046-fd70e977d8ff" path="/var/lib/kubelet/pods/d0fb6356-f017-476b-8046-fd70e977d8ff/volumes"
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.579268 1551 reconciler_common.go:172] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc254d8c-527a-49b3-8571-5f674630e01b-webhook-cert\") pod \"cc254d8c-527a-49b3-8571-5f674630e01b\" (UID: \"cc254d8c-527a-49b3-8571-5f674630e01b\") "
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.579348 1551 reconciler_common.go:172] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwlx6\" (UniqueName: \"kubernetes.io/projected/cc254d8c-527a-49b3-8571-5f674630e01b-kube-api-access-nwlx6\") pod \"cc254d8c-527a-49b3-8571-5f674630e01b\" (UID: \"cc254d8c-527a-49b3-8571-5f674630e01b\") "
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.581355 1551 operation_generator.go:882] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc254d8c-527a-49b3-8571-5f674630e01b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "cc254d8c-527a-49b3-8571-5f674630e01b" (UID: "cc254d8c-527a-49b3-8571-5f674630e01b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.581863 1551 operation_generator.go:882] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc254d8c-527a-49b3-8571-5f674630e01b-kube-api-access-nwlx6" (OuterVolumeSpecName: "kube-api-access-nwlx6") pod "cc254d8c-527a-49b3-8571-5f674630e01b" (UID: "cc254d8c-527a-49b3-8571-5f674630e01b"). InnerVolumeSpecName "kube-api-access-nwlx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.680177 1551 reconciler_common.go:300] "Volume detached for volume \"kube-api-access-nwlx6\" (UniqueName: \"kubernetes.io/projected/cc254d8c-527a-49b3-8571-5f674630e01b-kube-api-access-nwlx6\") on node \"addons-642352\" DevicePath \"\""
Feb 01 09:13:40 addons-642352 kubelet[1551]: I0201 09:13:40.680223 1551 reconciler_common.go:300] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc254d8c-527a-49b3-8571-5f674630e01b-webhook-cert\") on node \"addons-642352\" DevicePath \"\""
Feb 01 09:13:41 addons-642352 kubelet[1551]: I0201 09:13:41.054633 1551 kubelet_volumes.go:161] "Cleaned up orphaned pod volumes dir" podUID="cc254d8c-527a-49b3-8571-5f674630e01b" path="/var/lib/kubelet/pods/cc254d8c-527a-49b3-8571-5f674630e01b/volumes"
Feb 01 09:13:41 addons-642352 kubelet[1551]: I0201 09:13:41.401287 1551 scope.go:117] "RemoveContainer" containerID="a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0"
Feb 01 09:13:41 addons-642352 kubelet[1551]: I0201 09:13:41.416461 1551 scope.go:117] "RemoveContainer" containerID="a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0"
Feb 01 09:13:41 addons-642352 kubelet[1551]: E0201 09:13:41.416900 1551 remote_runtime.go:432] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0\": container with ID starting with a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0 not found: ID does not exist" containerID="a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0"
Feb 01 09:13:41 addons-642352 kubelet[1551]: I0201 09:13:41.416952 1551 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0"} err="failed to get container status \"a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0\": rpc error: code = NotFound desc = could not find container \"a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0\": container with ID starting with a54df7cee08c7b932964eb5a1c87d9fa818f1e82d70b34643f0cf4e6a340d4c0 not found: ID does not exist"
==> storage-provisioner [dba6c9646ec96e00d1adc43b899985ab7e90b2cb3538afc5b34b5893ea2c92e2] <==
I0201 09:09:53.335337 1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
I0201 09:09:53.347421 1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
I0201 09:09:53.347471 1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
I0201 09:09:53.355505 1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
I0201 09:09:53.355727 1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-642352_3810d19d-1e08-4256-b3a0-b060c69f6f92!
I0201 09:09:53.356139 1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"c924034c-021f-4701-9979-d9844442e945", APIVersion:"v1", ResourceVersion:"861", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-642352_3810d19d-1e08-4256-b3a0-b060c69f6f92 became leader
I0201 09:09:53.530672 1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-642352_3810d19d-1e08-4256-b3a0-b060c69f6f92!
-- /stdout --
helpers_test.go:254: (dbg) Run: out/minikube-linux-amd64 status --format={{.APIServer}} -p addons-642352 -n addons-642352
helpers_test.go:261: (dbg) Run: kubectl --context addons-642352 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestAddons/parallel/Ingress FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestAddons/parallel/Ingress (163.14s)