Test Report: KVM_Linux_containerd 12230

Commit: 1c76ff5cea01605c2d985c010644edf1e689d34b:2021-08-13:19970

Failed tests (2/263)

Order  Failed test                              Duration (s)
  228  TestPause/serial/PauseAgain                    10.84
  238  TestNetworkPlugins/group/calico/Start         580.22
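
For local triage, a failed subtest from this suite can typically be re-run against the same commit's binaries. A minimal sketch, assuming the standard minikube integration-test layout; the e2e binary name, flags, and timeout below follow the upstream convention and are not taken from this report:

	# Build the binaries the integration suite drives.
	make out/minikube-linux-amd64 out/e2e-linux-amd64

	# Re-run only the failing subtest with the same driver/runtime combination
	# this job uses (kvm2 + containerd). Treat the exact flags as illustrative.
	out/e2e-linux-amd64 \
	  -minikube-start-args="--driver=kvm2 --container-runtime=containerd" \
	  -test.run "TestPause/serial/PauseAgain" \
	  -test.timeout=30m -test.v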
TestPause/serial/PauseAgain (10.84s)

=== RUN   TestPause/serial/PauseAgain
pause_test.go:107: (dbg) Run:  out/minikube-linux-amd64 pause -p pause-20210813001951-679351 --alsologtostderr -v=5

=== CONT  TestPause/serial/PauseAgain
pause_test.go:107: (dbg) Non-zero exit: out/minikube-linux-amd64 pause -p pause-20210813001951-679351 --alsologtostderr -v=5: exit status 80 (6.266083141s)

-- stdout --
	* Pausing node pause-20210813001951-679351 ... 
	
	

-- /stdout --
** stderr ** 
	I0813 00:24:03.823623  717142 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:24:03.823843  717142 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:24:03.823857  717142 out.go:311] Setting ErrFile to fd 2...
	I0813 00:24:03.823862  717142 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:24:03.823996  717142 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:24:03.824254  717142 out.go:305] Setting JSON to false
	I0813 00:24:03.824290  717142 mustload.go:65] Loading cluster: pause-20210813001951-679351
	I0813 00:24:03.825243  717142 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:24:03.825303  717142 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:24:03.840435  717142 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:46807
	I0813 00:24:03.840994  717142 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:24:03.841673  717142 main.go:130] libmachine: Using API Version  1
	I0813 00:24:03.841698  717142 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:24:03.842109  717142 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:24:03.842328  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .GetState
	I0813 00:24:03.845846  717142 host.go:66] Checking if "pause-20210813001951-679351" exists ...
	I0813 00:24:03.846204  717142 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:24:03.846247  717142 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:24:03.859125  717142 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:46749
	I0813 00:24:03.859721  717142 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:24:03.860239  717142 main.go:130] libmachine: Using API Version  1
	I0813 00:24:03.860264  717142 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:24:03.860640  717142 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:24:03.860799  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .DriverName
	I0813 00:24:03.861566  717142 pause.go:58] "namespaces" [kube-system kubernetes-dashboard storage-gluster istio-operator]="keys" map[addons:[] all:%!s(bool=false) apiserver-ips:[] apiserver-name:minikubeCA apiserver-names:[] apiserver-port:%!s(int=8443) auto-update-drivers:%!s(bool=true) base-image:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 bootstrapper:kubeadm cache-images:%!s(bool=true) cancel-scheduled:%!s(bool=false) cni: container-runtime:docker cpus:2 cri-socket: delete-on-failure:%!s(bool=false) disable-driver-mounts:%!s(bool=false) disk-size:20000mb dns-domain:cluster.local dns-proxy:%!s(bool=false) docker-env:[] docker-opt:[] download-only:%!s(bool=false) driver: dry-run:%!s(bool=false) embed-certs:%!s(bool=false) embedcerts:%!s(bool=false) enable-default-cni:%!s(bool=false) extra-config: extra-disks:%!s(int=0) feature-gates: force:%!s(bool=false) force-systemd:%!s(bool=false) host-dns-resolver:%!s(bool=true) host-only-cidr:192
.168.99.1/24 host-only-nic-type:virtio hyperkit-vpnkit-sock: hyperkit-vsock-ports:[] hyperv-external-adapter: hyperv-use-external-switch:%!s(bool=false) hyperv-virtual-switch: image-mirror-country: image-repository: insecure-registry:[] install-addons:%!s(bool=true) interactive:%!s(bool=true) iso-url:[https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso https://github.com/kubernetes/minikube/releases/download/v1.22.0-1628238775-12122/minikube-v1.22.0-1628238775-12122.iso https://kubernetes.oss-cn-hangzhou.aliyuncs.com/minikube/iso/minikube-v1.22.0-1628238775-12122.iso] keep-context:%!s(bool=false) keep-context-active:%!s(bool=false) kubernetes-version: kvm-gpu:%!s(bool=false) kvm-hidden:%!s(bool=false) kvm-network:default kvm-numa-count:%!s(int=1) kvm-qemu-uri:qemu:///system listen-address: memory: mount:%!s(bool=false) mount-string:/home/jenkins:/minikube-host namespace:default nat-nic-type:virtio native-ssh:%!s(bool=true) network: network-plugin: nfs-share:[] nfs-sh
ares-root:/nfsshares no-vtx-check:%!s(bool=false) nodes:%!s(int=1) output:text ports:[] preload:%!s(bool=true) profile:pause-20210813001951-679351 purge:%!s(bool=false) registry-mirror:[] reminderwaitperiodinhours:%!s(int=24) schedule:0s service-cluster-ip-range:10.96.0.0/12 ssh-ip-address: ssh-key: ssh-port:%!s(int=22) ssh-user:root trace: user: uuid: vm:%!s(bool=false) vm-driver: wait:[apiserver system_pods] wait-timeout:6m0s wantnonedriverwarning:%!s(bool=true) wantupdatenotification:%!s(bool=true) wantvirtualboxdriverwarning:%!s(bool=true)]="(MISSING)"
	I0813 00:24:04.557047  717142 out.go:177] * Pausing node pause-20210813001951-679351 ... 
	I0813 00:24:04.557096  717142 host.go:66] Checking if "pause-20210813001951-679351" exists ...
	I0813 00:24:04.557478  717142 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:24:04.557547  717142 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:24:04.572028  717142 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:35585
	I0813 00:24:04.572591  717142 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:24:04.573279  717142 main.go:130] libmachine: Using API Version  1
	I0813 00:24:04.573309  717142 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:24:04.573777  717142 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:24:04.573975  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .DriverName
	I0813 00:24:04.574215  717142 ssh_runner.go:149] Run: systemctl --version
	I0813 00:24:04.574245  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .GetSSHHostname
	I0813 00:24:04.581466  717142 main.go:130] libmachine: (pause-20210813001951-679351) DBG | domain pause-20210813001951-679351 has defined MAC address 52:54:00:82:3b:c7 in network mk-pause-20210813001951-679351
	I0813 00:24:04.581887  717142 main.go:130] libmachine: (pause-20210813001951-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:82:3b:c7", ip: ""} in network mk-pause-20210813001951-679351: {Iface:virbr11 ExpiryTime:2021-08-13 01:20:54 +0000 UTC Type:0 Mac:52:54:00:82:3b:c7 Iaid: IPaddr:192.168.127.196 Prefix:24 Hostname:pause-20210813001951-679351 Clientid:01:52:54:00:82:3b:c7}
	I0813 00:24:04.581918  717142 main.go:130] libmachine: (pause-20210813001951-679351) DBG | domain pause-20210813001951-679351 has defined IP address 192.168.127.196 and MAC address 52:54:00:82:3b:c7 in network mk-pause-20210813001951-679351
	I0813 00:24:04.582082  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .GetSSHPort
	I0813 00:24:04.582277  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .GetSSHKeyPath
	I0813 00:24:04.582444  717142 main.go:130] libmachine: (pause-20210813001951-679351) Calling .GetSSHUsername
	I0813 00:24:04.582585  717142 sshutil.go:53] new ssh client: &{IP:192.168.127.196 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/pause-20210813001951-679351/id_rsa Username:docker}
	I0813 00:24:04.710149  717142 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0813 00:24:04.729788  717142 pause.go:50] kubelet running: true
	I0813 00:24:04.729846  717142 ssh_runner.go:149] Run: sudo systemctl disable --now kubelet
	I0813 00:24:07.264937  717142 ssh_runner.go:189] Completed: sudo systemctl disable --now kubelet: (2.535062871s)
	I0813 00:24:07.265005  717142 cri.go:41] listing CRI containers in root /run/containerd/runc/k8s.io: {State:running Name: Namespaces:[kube-system kubernetes-dashboard storage-gluster istio-operator]}
	I0813 00:24:07.265084  717142 ssh_runner.go:149] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system; crictl ps -a --quiet --label io.kubernetes.pod.namespace=kubernetes-dashboard; crictl ps -a --quiet --label io.kubernetes.pod.namespace=storage-gluster; crictl ps -a --quiet --label io.kubernetes.pod.namespace=istio-operator"
	I0813 00:24:07.390746  717142 cri.go:76] found id: "a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d"
	I0813 00:24:07.390787  717142 cri.go:76] found id: "7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9"
	I0813 00:24:07.390795  717142 cri.go:76] found id: "d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f"
	I0813 00:24:07.390801  717142 cri.go:76] found id: "6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b"
	I0813 00:24:07.390806  717142 cri.go:76] found id: "705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b"
	I0813 00:24:07.390812  717142 cri.go:76] found id: "64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0"
	I0813 00:24:07.390819  717142 cri.go:76] found id: "85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7"
	I0813 00:24:07.390824  717142 cri.go:76] found id: "9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe"
	I0813 00:24:07.390830  717142 cri.go:76] found id: "efb6b8992aa826974c98985c6dbeb065b7a93d6ceeacacae98b06ec18bbfd5bb"
	I0813 00:24:07.390842  717142 cri.go:76] found id: "c11b8a977685bc2516a3a180b7d7e5a078649d5b9c68db67af64bdbf0438193c"
	I0813 00:24:07.390854  717142 cri.go:76] found id: "2efebee19d7a6bd77fd1333dab2cc543c575e8c6babdae865b90a1cf0fa48744"
	I0813 00:24:07.390859  717142 cri.go:76] found id: "3874ae5baf2d856570fa5534f52778b464323afbd92eca54de5a983517dbbb65"
	I0813 00:24:07.390864  717142 cri.go:76] found id: "3afc8c09f828616463f8d4246cdb7a602c45569e04de078f3b507b5df49993e8"
	I0813 00:24:07.390871  717142 cri.go:76] found id: ""
	I0813 00:24:07.390930  717142 ssh_runner.go:149] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0813 00:24:07.430219  717142 cri.go:103] JSON = [{"ociVersion":"1.0.2-dev","id":"2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c","pid":3922,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c/rootfs","created":"2021-08-13T00:23:14.025881684Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-pause-20210813001951-679351_827c3abded97b0f25c66fba9223b4c18"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238","pid":4047,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/46f36163c53e1f9ab
e41a73a7c5bcd16715af4061345c6bf366e4d73e9771238","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238/rootfs","created":"2021-08-13T00:23:15.111504623Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-2mkpr_59d9290e-34c7-4e80-a909-8d989552ec78"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565","pid":4657,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565/rootfs","created":"2021-08-13T00:23:30.022843176Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","
io.kubernetes.cri.sandbox-id":"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_b781b362-9644-4c96-a463-4cb61bc5ab58"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b","pid":4425,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b/rootfs","created":"2021-08-13T00:23:18.323409933Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0","pid":4306,"status":"run
ning","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0/rootfs","created":"2021-08-13T00:23:16.307393022Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b","pid":4402,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b/rootfs","created":"2021-08-13T00:23:17.653178958Z","annotations":{"io.kubernetes.cri.container-n
ame":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9","pid":4694,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9/rootfs","created":"2021-08-13T00:23:30.735130787Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7","pid":4289,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.t
ask/k8s.io/85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7/rootfs","created":"2021-08-13T00:23:16.74740056Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d","pid":4764,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d/rootfs","created":"2021-08-13T00:23:45.351615619Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container
-type":"container","io.kubernetes.cri.sandbox-id":"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264","pid":3973,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264/rootfs","created":"2021-08-13T00:23:14.162109109Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-pause-20210813001951-679351_f90cf81553c0bced79ef2f705df65c51"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7","pid":4034,"status":"runni
ng","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7/rootfs","created":"2021-08-13T00:23:14.597314012Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-pause-20210813001951-679351_ae2af969af5dffce0131cf735702505a"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95","pid":4024,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95/rootfs","created
":"2021-08-13T00:23:14.465535488Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-pause-20210813001951-679351_058544a8a5810508caf0af791704b304"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f","pid":4465,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f/rootfs","created":"2021-08-13T00:23:18.584370342Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.sandbox-id":"f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c"},"owner":"root"},{"ociVersi
on":"1.0.2-dev","id":"f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c","pid":4322,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c/rootfs","created":"2021-08-13T00:23:16.810022574Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-id":"f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-558bd4d5db-xjmwl_5897a243-0289-4042-882a-d25cb005813b"},"owner":"root"}]
	I0813 00:24:07.430481  717142 cri.go:113] list returned 14 containers
	I0813 00:24:07.430498  717142 cri.go:116] container: {ID:2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c Status:running}
	I0813 00:24:07.430517  717142 cri.go:118] skipping 2c9091a6bd8d73b0f55e45a516ab950e484c7e80697c4dbbcdf6f65909dafc9c - not in ps
	I0813 00:24:07.430526  717142 cri.go:116] container: {ID:46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238 Status:running}
	I0813 00:24:07.430534  717142 cri.go:118] skipping 46f36163c53e1f9abe41a73a7c5bcd16715af4061345c6bf366e4d73e9771238 - not in ps
	I0813 00:24:07.430540  717142 cri.go:116] container: {ID:4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565 Status:running}
	I0813 00:24:07.430547  717142 cri.go:118] skipping 4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565 - not in ps
	I0813 00:24:07.430554  717142 cri.go:116] container: {ID:6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b Status:running}
	I0813 00:24:07.430561  717142 cri.go:116] container: {ID:64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0 Status:running}
	I0813 00:24:07.430568  717142 cri.go:116] container: {ID:705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b Status:running}
	I0813 00:24:07.430573  717142 cri.go:116] container: {ID:7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9 Status:running}
	I0813 00:24:07.430582  717142 cri.go:116] container: {ID:85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7 Status:running}
	I0813 00:24:07.430590  717142 cri.go:116] container: {ID:a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d Status:running}
	I0813 00:24:07.430598  717142 cri.go:116] container: {ID:a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264 Status:running}
	I0813 00:24:07.430605  717142 cri.go:118] skipping a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264 - not in ps
	I0813 00:24:07.430611  717142 cri.go:116] container: {ID:accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7 Status:running}
	I0813 00:24:07.430628  717142 cri.go:118] skipping accc3895e9638677ea28570ea79a089ad8ba2c46e4ceb8dd1f082776f96062e7 - not in ps
	I0813 00:24:07.430636  717142 cri.go:116] container: {ID:bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95 Status:running}
	I0813 00:24:07.430643  717142 cri.go:118] skipping bb4e7930b2ada4f1ec65421ed5d4b0b59b255e285b95240391d8d4e71d344e95 - not in ps
	I0813 00:24:07.430649  717142 cri.go:116] container: {ID:d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f Status:running}
	I0813 00:24:07.430654  717142 cri.go:116] container: {ID:f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c Status:running}
	I0813 00:24:07.430658  717142 cri.go:118] skipping f5e6ffa407fcd7bad9fbd18bd48a2b0e8ac87614f276855e14b1c4ee6603854c - not in ps
	I0813 00:24:07.430708  717142 ssh_runner.go:149] Run: sudo runc --root /run/containerd/runc/k8s.io pause 6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b
	I0813 00:24:07.452932  717142 ssh_runner.go:149] Run: sudo runc --root /run/containerd/runc/k8s.io pause 64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0
	I0813 00:24:07.474067  717142 ssh_runner.go:149] Run: sudo runc --root /run/containerd/runc/k8s.io pause 705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b
	I0813 00:24:09.120445  717142 out.go:177] 
	W0813 00:24:09.120726  717142 out.go:242] X Exiting due to GUEST_PAUSE: runc: sudo runc --root /run/containerd/runc/k8s.io pause 705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b: Process exited with status 1
	stdout:
	
	stderr:
	time="2021-08-13T00:24:07Z" level=error msg="unable to freeze"
	
	X Exiting due to GUEST_PAUSE: runc: sudo runc --root /run/containerd/runc/k8s.io pause 705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b: Process exited with status 1
	stdout:
	
	stderr:
	time="2021-08-13T00:24:07Z" level=error msg="unable to freeze"
	
	W0813 00:24:09.120749  717142 out.go:242] * 
	* 
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	W0813 00:24:09.159055  717142 out.go:242] ╭──────────────────────────────────────────────────────────────────────────────╮
	│                                                                              │
	│    * If the above advice does not help, please let us know:                  │
	│      https://github.com/kubernetes/minikube/issues/new/choose                │
	│                                                                              │
	│    * Please attach the following file to the GitHub issue:                   │
	│    * - /tmp/minikube_pause_49fdaea37aad8ebccb761973c21590cc64efe8d9_0.log    │
	│                                                                              │
	╰──────────────────────────────────────────────────────────────────────────────╯
	╭──────────────────────────────────────────────────────────────────────────────╮
	│                                                                              │
	│    * If the above advice does not help, please let us know:                  │
	│      https://github.com/kubernetes/minikube/issues/new/choose                │
	│                                                                              │
	│    * Please attach the following file to the GitHub issue:                   │
	│    * - /tmp/minikube_pause_49fdaea37aad8ebccb761973c21590cc64efe8d9_0.log    │
	│                                                                              │
	╰──────────────────────────────────────────────────────────────────────────────╯
	I0813 00:24:10.022229  717142 out.go:177] 

** /stderr **
pause_test.go:109: failed to pause minikube with args: "out/minikube-linux-amd64 pause -p pause-20210813001951-679351 --alsologtostderr -v=5" : exit status 80
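
The pause failed while freezing the third container in the list (705c524b..., the etcd container per the runc JSON above): "sudo runc ... pause" exited 1 with "unable to freeze", which means the container's freezer cgroup never reached the FROZEN state. A hedged diagnostic sketch, run inside the guest via "minikube ssh -p pause-20210813001951-679351"; the cgroup-v1 freezer path is an assumption about this guest image, not something shown in the log:

	# List runc-managed containers in containerd's k8s.io namespace (the same
	# invocation the log shows) and retry the pause by hand.
	sudo runc --root /run/containerd/runc/k8s.io list
	sudo runc --root /run/containerd/runc/k8s.io pause \
	  705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b

	# On success the freezer state should read FROZEN; the exact cgroup path
	# below is illustrative and may differ on this guest OS.
	cat /sys/fs/cgroup/freezer/*/705c524b*/freezer.state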
helpers_test.go:223: -----------------------post-mortem--------------------------------
helpers_test.go:240: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p pause-20210813001951-679351 -n pause-20210813001951-679351
helpers_test.go:240: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p pause-20210813001951-679351 -n pause-20210813001951-679351: exit status 2 (308.578748ms)

-- stdout --
	Running

-- /stdout --
helpers_test.go:240: status error: exit status 2 (may be ok)
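The post-mortem status check only prints {{.Host}}; when a cluster is half paused it can help to print every component in one call. A minimal sketch against the same profile, using the standard minikube status template fields (assumed, not shown in this report):

	out/minikube-linux-amd64 status -p pause-20210813001951-679351 \
	  --format '{{.Host}} {{.Kubelet}} {{.APIServer}} {{.Kubeconfig}}'

Exit status 2 here is consistent with the host VM running while the kubelet has already been stopped by the earlier, partially completed pause.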
helpers_test.go:245: <<< TestPause/serial/PauseAgain FAILED: start of post-mortem logs <<<
helpers_test.go:246: ======>  post-mortem[TestPause/serial/PauseAgain]: minikube logs <======
helpers_test.go:248: (dbg) Run:  out/minikube-linux-amd64 -p pause-20210813001951-679351 logs -n 25

=== CONT  TestPause/serial/PauseAgain
helpers_test.go:248: (dbg) Done: out/minikube-linux-amd64 -p pause-20210813001951-679351 logs -n 25: (1.402347725s)
helpers_test.go:253: TestPause/serial/PauseAgain logs: 
-- stdout --
	* 
	* ==> Audit <==
	* |---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	| Command |                   Args                   |                 Profile                  |  User   | Version |          Start Time           |           End Time            |
	|---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	| start   | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:18:20 UTC | Fri, 13 Aug 2021 00:19:22 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	|         | --memory=2048 --driver=kvm2              |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| stop    | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:22 UTC | Fri, 13 Aug 2021 00:19:22 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	|         | --cancel-scheduled                       |                                          |         |         |                               |                               |
	| stop    | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:35 UTC | Fri, 13 Aug 2021 00:19:42 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	|         | --schedule 5s                            |                                          |         |         |                               |                               |
	| delete  | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:50 UTC | Fri, 13 Aug 2021 00:19:51 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	| start   | -p                                       | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:21:03 UTC |
	|         | force-systemd-env-20210813001951-679351  |                                          |         |         |                               |                               |
	|         | --memory=2048 --alsologtostderr          |                                          |         |         |                               |                               |
	|         | -v=5 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | force-systemd-env-20210813001951-679351  | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:03 UTC | Fri, 13 Aug 2021 00:21:04 UTC |
	|         | ssh cat /etc/containerd/config.toml      |                                          |         |         |                               |                               |
	| delete  | -p                                       | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:04 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	|         | force-systemd-env-20210813001951-679351  |                                          |         |         |                               |                               |
	| delete  | -p                                       | kubenet-20210813002105-679351            | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:05 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	|         | kubenet-20210813002105-679351            |                                          |         |         |                               |                               |
	| delete  | -p false-20210813002105-679351           | false-20210813002105-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:05 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	| start   | -p                                       | offline-containerd-20210813001951-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:22:10 UTC |
	|         | offline-containerd-20210813001951-679351 |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=1 --memory=2048     |                                          |         |         |                               |                               |
	|         | --wait=true --driver=kvm2                |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| delete  | -p                                       | offline-containerd-20210813001951-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:10 UTC | Fri, 13 Aug 2021 00:22:11 UTC |
	|         | offline-containerd-20210813001951-679351 |                                          |         |         |                               |                               |
	| start   | -p                                       | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:08 UTC | Fri, 13 Aug 2021 00:22:38 UTC |
	|         | force-systemd-flag-20210813002108-679351 |                                          |         |         |                               |                               |
	|         | --memory=2048 --force-systemd            |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=5 --driver=kvm2     |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | force-systemd-flag-20210813002108-679351 | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:38 UTC | Fri, 13 Aug 2021 00:22:38 UTC |
	|         | ssh cat /etc/containerd/config.toml      |                                          |         |         |                               |                               |
	| delete  | -p                                       | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:38 UTC | Fri, 13 Aug 2021 00:22:40 UTC |
	|         | force-systemd-flag-20210813002108-679351 |                                          |         |         |                               |                               |
	| start   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:22:49 UTC |
	|         | --memory=2048                            |                                          |         |         |                               |                               |
	|         | --install-addons=false                   |                                          |         |         |                               |                               |
	|         | --wait=all --driver=kvm2                 |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| start   | -p                                       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:11 UTC | Fri, 13 Aug 2021 00:23:29 UTC |
	|         | cert-options-20210813002211-679351       |                                          |         |         |                               |                               |
	|         | --memory=2048                            |                                          |         |         |                               |                               |
	|         | --apiserver-ips=127.0.0.1                |                                          |         |         |                               |                               |
	|         | --apiserver-ips=192.168.15.15            |                                          |         |         |                               |                               |
	|         | --apiserver-names=localhost              |                                          |         |         |                               |                               |
	|         | --apiserver-names=www.google.com         |                                          |         |         |                               |                               |
	|         | --apiserver-port=8555                    |                                          |         |         |                               |                               |
	|         | --driver=kvm2                            |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | cert-options-20210813002211-679351       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:29 UTC | Fri, 13 Aug 2021 00:23:29 UTC |
	|         | ssh openssl x509 -text -noout -in        |                                          |         |         |                               |                               |
	|         | /var/lib/minikube/certs/apiserver.crt    |                                          |         |         |                               |                               |
	| delete  | -p                                       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:30 UTC | Fri, 13 Aug 2021 00:23:31 UTC |
	|         | cert-options-20210813002211-679351       |                                          |         |         |                               |                               |
	| start   | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:29 UTC | Fri, 13 Aug 2021 00:23:40 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	|         | --memory=2200 --alsologtostderr          |                                          |         |         |                               |                               |
	|         | -v=1 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| logs    | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:40 UTC | Fri, 13 Aug 2021 00:23:42 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	| delete  | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:42 UTC | Fri, 13 Aug 2021 00:23:43 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	| start   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:49 UTC | Fri, 13 Aug 2021 00:23:59 UTC |
	|         | --alsologtostderr                        |                                          |         |         |                               |                               |
	|         | -v=1 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| pause   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:59 UTC | Fri, 13 Aug 2021 00:24:00 UTC |
	|         | --alsologtostderr -v=5                   |                                          |         |         |                               |                               |
	| unpause | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:24:00 UTC | Fri, 13 Aug 2021 00:24:03 UTC |
	|         | --alsologtostderr -v=5                   |                                          |         |         |                               |                               |
	| start   | -p                                       | kubernetes-upgrade-20210813002240-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:40 UTC | Fri, 13 Aug 2021 00:24:04 UTC |
	|         | kubernetes-upgrade-20210813002240-679351 |                                          |         |         |                               |                               |
	|         | --memory=2200                            |                                          |         |         |                               |                               |
	|         | --kubernetes-version=v1.14.0             |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=1 --driver=kvm2     |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	|---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	
	* 
	* ==> Last Start <==
	* Log file created at: 2021/08/13 00:23:43
	Running on machine: debian-jenkins-agent-10
	Binary: Built with gc go1.16.7 for linux/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0813 00:23:43.503390  716767 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:23:43.503468  716767 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:23:43.503471  716767 out.go:311] Setting ErrFile to fd 2...
	I0813 00:23:43.503474  716767 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:23:43.503593  716767 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:23:43.503852  716767 out.go:305] Setting JSON to false
	I0813 00:23:43.540034  716767 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":14787,"bootTime":1628799437,"procs":199,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0813 00:23:43.540691  716767 start.go:121] virtualization: kvm guest
	I0813 00:23:43.543734  716767 out.go:177] * [auto-20210813002105-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	I0813 00:23:43.545313  716767 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:23:43.543908  716767 notify.go:169] Checking for updates...
	I0813 00:23:43.546871  716767 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0813 00:23:43.548308  716767 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:23:43.549680  716767 out.go:177]   - MINIKUBE_LOCATION=12230
	I0813 00:23:43.550290  716767 driver.go:335] Setting default libvirt URI to qemu:///system
	I0813 00:23:43.581612  716767 out.go:177] * Using the kvm2 driver based on user configuration
	I0813 00:23:43.581645  716767 start.go:278] selected driver: kvm2
	I0813 00:23:43.581651  716767 start.go:751] validating driver "kvm2" against <nil>
	I0813 00:23:43.581671  716767 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0813 00:23:43.582857  716767 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0813 00:23:43.583045  716767 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0813 00:23:43.596309  716767 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0813 00:23:43.596386  716767 start_flags.go:263] no existing cluster config was found, will generate one from the flags 
	I0813 00:23:43.596590  716767 start_flags.go:697] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0813 00:23:43.596631  716767 cni.go:93] Creating CNI manager for ""
	I0813 00:23:43.596642  716767 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0813 00:23:43.596653  716767 start_flags.go:272] Found "bridge CNI" CNI - setting NetworkPlugin=cni
	I0813 00:23:43.596674  716767 start_flags.go:277] config:
	{Name:auto-20210813002105-679351 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2048 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:auto-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: Ne
tworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:5m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0813 00:23:43.596829  716767 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0813 00:23:39.698092  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:42.199438  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:46.628507  716004 out.go:204]   - Configuring RBAC rules ...
	I0813 00:23:47.055082  716004 cni.go:93] Creating CNI manager for ""
	I0813 00:23:47.055113  716004 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0813 00:23:47.057109  716004 out.go:177] * Configuring bridge CNI (Container Networking Interface) ...
	I0813 00:23:47.057193  716004 ssh_runner.go:149] Run: sudo mkdir -p /etc/cni/net.d
	I0813 00:23:47.065594  716004 ssh_runner.go:316] scp memory --> /etc/cni/net.d/1-k8s.conflist (457 bytes)
	I0813 00:23:47.082382  716004 ssh_runner.go:149] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0813 00:23:47.082440  716004 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.14.0/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:23:47.082452  716004 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.14.0/kubectl label nodes minikube.k8s.io/version=v1.22.0 minikube.k8s.io/commit=dc1c3ca26e9449ce488a773126b8450402c94a19 minikube.k8s.io/name=kubernetes-upgrade-20210813002240-679351 minikube.k8s.io/updated_at=2021_08_13T00_23_47_0700 --all --overwrite --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:23:47.107481  716004 ops.go:34] apiserver oom_adj: 16
	I0813 00:23:47.107506  716004 ops.go:39] adjusting apiserver oom_adj to -10
	I0813 00:23:47.107540  716004 ssh_runner.go:149] Run: /bin/bash -c "echo -10 | sudo tee /proc/$(pgrep kube-apiserver)/oom_adj"
	I0813 00:23:47.458289  716004 kubeadm.go:985] duration metric: took 375.90634ms to wait for elevateKubeSystemPrivileges.
	I0813 00:23:47.458364  716004 kubeadm.go:392] StartCluster complete in 17.107930015s
	I0813 00:23:47.458404  716004 settings.go:142] acquiring lock: {Name:mk513992707531c891d59a503efeac355a20c006 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:23:47.458521  716004 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:23:47.460307  716004 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig: {Name:mk4539f4325bfd6eb26b6ddb5c7e1835c2548cd2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:23:47.461424  716004 kapi.go:59] client config for kubernetes-upgrade-20210813002240-679351: &rest.Config{Host:"https://192.168.50.136:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kubernetes-upgrade-20210813002240-679351/client.crt", KeyFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/pr
ofiles/kubernetes-upgrade-20210813002240-679351/client.key", CAFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x17e2a80), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0813 00:23:47.990899  716004 kapi.go:244] deployment "coredns" in namespace "kube-system" and context "kubernetes-upgrade-20210813002240-679351" rescaled to 1
	I0813 00:23:47.990962  716004 start.go:226] Will wait 6m0s for node &{Name: IP:192.168.50.136 Port:8443 KubernetesVersion:v1.14.0 ControlPlane:true Worker:true}
	I0813 00:23:47.993234  716004 out.go:177] * Verifying Kubernetes components...
	I0813 00:23:47.993294  716004 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0813 00:23:47.991080  716004 ssh_runner.go:149] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.14.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0813 00:23:47.991086  716004 addons.go:342] enableAddons start: toEnable=map[], additional=[]
	I0813 00:23:47.993410  716004 addons.go:59] Setting storage-provisioner=true in profile "kubernetes-upgrade-20210813002240-679351"
	I0813 00:23:47.993436  716004 addons.go:135] Setting addon storage-provisioner=true in "kubernetes-upgrade-20210813002240-679351"
	W0813 00:23:47.993448  716004 addons.go:147] addon storage-provisioner should already be in state true
	I0813 00:23:47.993443  716004 addons.go:59] Setting default-storageclass=true in profile "kubernetes-upgrade-20210813002240-679351"
	I0813 00:23:47.993463  716004 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "kubernetes-upgrade-20210813002240-679351"
	I0813 00:23:47.993480  716004 host.go:66] Checking if "kubernetes-upgrade-20210813002240-679351" exists ...
	I0813 00:23:47.993992  716004 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:47.994005  716004 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:47.994039  716004 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:47.994153  716004 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:48.006889  716004 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:32837
	I0813 00:23:48.007383  716004 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:48.007952  716004 main.go:130] libmachine: Using API Version  1
	I0813 00:23:48.007979  716004 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:48.008377  716004 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:48.008533  716004 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:41115
	I0813 00:23:48.008595  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetState
	I0813 00:23:48.008997  716004 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:48.009614  716004 main.go:130] libmachine: Using API Version  1
	I0813 00:23:48.009645  716004 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:48.010049  716004 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:48.010666  716004 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:48.010719  716004 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:48.013509  716004 kapi.go:59] client config for kubernetes-upgrade-20210813002240-679351: &rest.Config{Host:"https://192.168.50.136:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kubernetes-upgrade-20210813002240-679351/client.crt", KeyFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kubernetes-upgrade-20210813002240-679351/client.key", CAFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x17e2a80), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0813 00:23:48.021992  716004 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:38909
	I0813 00:23:48.022484  716004 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:48.022980  716004 main.go:130] libmachine: Using API Version  1
	I0813 00:23:48.023002  716004 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:48.023179  716004 addons.go:135] Setting addon default-storageclass=true in "kubernetes-upgrade-20210813002240-679351"
	W0813 00:23:48.023202  716004 addons.go:147] addon default-storageclass should already be in state true
	I0813 00:23:48.023234  716004 host.go:66] Checking if "kubernetes-upgrade-20210813002240-679351" exists ...
	I0813 00:23:48.023382  716004 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:48.023527  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetState
	I0813 00:23:48.023657  716004 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:48.023705  716004 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:48.027027  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .DriverName
	I0813 00:23:43.598996  716767 out.go:177] * Starting control plane node auto-20210813002105-679351 in cluster auto-20210813002105-679351
	I0813 00:23:43.599019  716767 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0813 00:23:43.599064  716767 preload.go:147] Found local preload: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4
	I0813 00:23:43.599089  716767 cache.go:56] Caching tarball of preloaded images
	I0813 00:23:43.599192  716767 preload.go:173] Found /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4 in cache, skipping download
	I0813 00:23:43.599219  716767 cache.go:59] Finished verifying existence of preloaded tar for  v1.21.3 on containerd
	I0813 00:23:43.599348  716767 profile.go:148] Saving config to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/config.json ...
	I0813 00:23:43.599382  716767 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/config.json: {Name:mk1aa685bcd0f4ba75f0a968f048380e5aff9662 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:23:43.599562  716767 cache.go:205] Successfully downloaded all kic artifacts
	I0813 00:23:43.599605  716767 start.go:313] acquiring machines lock for auto-20210813002105-679351: {Name:mk522658ca6319f8a1c60d46c1e97d60752e8eaa Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
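The machines lock above carries a {Clock Delay:500ms Timeout:13m0s Cancel} spec, the shape of a juju/mutex-style named mutex (an assumption; the log does not name the library), and the later "acquired machines lock ... in 7.44s" line shows the acquire can block behind other processes. A stdlib-only approximation of acquire-with-retry:

	package main

	import (
		"fmt"
		"os"
		"time"
	)

	// acquireLock polls for an exclusive lock file until timeout, retrying every delay.
	// Sketch only: a real implementation also handles stale locks and cancellation.
	func acquireLock(path string, delay, timeout time.Duration) (*os.File, error) {
		deadline := time.Now().Add(timeout)
		for {
			f, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o600)
			if err == nil {
				return f, nil // lock held; release by closing and removing the file
			}
			if time.Now().After(deadline) {
				return nil, fmt.Errorf("timed out acquiring %s: %w", path, err)
			}
			time.Sleep(delay)
		}
	}

	func main() {
		f, err := acquireLock("/tmp/machines.lock", 500*time.Millisecond, 13*time.Minute)
		if err != nil {
			panic(err)
		}
		defer os.Remove(f.Name())
		defer f.Close()
	}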
	I0813 00:23:48.029261  716004 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0813 00:23:48.029368  716004 addons.go:275] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0813 00:23:48.029383  716004 ssh_runner.go:316] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0813 00:23:48.029407  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHHostname
	I0813 00:23:48.035762  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | domain kubernetes-upgrade-20210813002240-679351 has defined MAC address 52:54:00:60:d0:ea in network mk-kubernetes-upgrade-20210813002240-679351
	I0813 00:23:48.036076  716004 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:42729
	I0813 00:23:48.036211  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:60:d0:ea", ip: ""} in network mk-kubernetes-upgrade-20210813002240-679351: {Iface:virbr5 ExpiryTime:2021-08-13 01:23:04 +0000 UTC Type:0 Mac:52:54:00:60:d0:ea Iaid: IPaddr:192.168.50.136 Prefix:24 Hostname:kubernetes-upgrade-20210813002240-679351 Clientid:01:52:54:00:60:d0:ea}
	I0813 00:23:48.036241  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | domain kubernetes-upgrade-20210813002240-679351 has defined IP address 192.168.50.136 and MAC address 52:54:00:60:d0:ea in network mk-kubernetes-upgrade-20210813002240-679351
	I0813 00:23:48.036417  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHPort
	I0813 00:23:48.036592  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHKeyPath
	I0813 00:23:48.036688  716004 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:48.036725  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHUsername
	I0813 00:23:48.036832  716004 sshutil.go:53] new ssh client: &{IP:192.168.50.136 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/kubernetes-upgrade-20210813002240-679351/id_rsa Username:docker}
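sshutil.go dials the VM with the per-machine generated key shown above. A minimal equivalent with golang.org/x/crypto/ssh (host-key checking is skipped, acceptable only for throwaway CI VMs; the key path is a placeholder):

	package main

	import (
		"os"

		"golang.org/x/crypto/ssh"
	)

	func main() {
		key, err := os.ReadFile("/path/to/machines/example/id_rsa") // placeholder
		if err != nil {
			panic(err)
		}
		signer, err := ssh.ParsePrivateKey(key)
		if err != nil {
			panic(err)
		}
		cfg := &ssh.ClientConfig{
			User:            "docker",
			Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
			HostKeyCallback: ssh.InsecureIgnoreHostKey(), // throwaway CI VMs only
		}
		client, err := ssh.Dial("tcp", "192.168.50.136:22", cfg)
		if err != nil {
			panic(err)
		}
		defer client.Close()
		// client.NewSession() would then run commands like the ssh_runner lines above.
	}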
	I0813 00:23:48.037184  716004 main.go:130] libmachine: Using API Version  1
	I0813 00:23:48.037211  716004 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:48.037592  716004 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:48.038274  716004 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:48.038339  716004 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:48.049879  716004 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:37019
	I0813 00:23:48.050283  716004 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:48.050784  716004 main.go:130] libmachine: Using API Version  1
	I0813 00:23:48.050821  716004 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:48.051201  716004 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:48.051381  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetState
	I0813 00:23:48.054477  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .DriverName
	I0813 00:23:48.054703  716004 addons.go:275] installing /etc/kubernetes/addons/storageclass.yaml
	I0813 00:23:48.054720  716004 ssh_runner.go:316] scp memory --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0813 00:23:48.054736  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHHostname
	I0813 00:23:48.060467  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | domain kubernetes-upgrade-20210813002240-679351 has defined MAC address 52:54:00:60:d0:ea in network mk-kubernetes-upgrade-20210813002240-679351
	I0813 00:23:48.060946  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:60:d0:ea", ip: ""} in network mk-kubernetes-upgrade-20210813002240-679351: {Iface:virbr5 ExpiryTime:2021-08-13 01:23:04 +0000 UTC Type:0 Mac:52:54:00:60:d0:ea Iaid: IPaddr:192.168.50.136 Prefix:24 Hostname:kubernetes-upgrade-20210813002240-679351 Clientid:01:52:54:00:60:d0:ea}
	I0813 00:23:48.060984  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | domain kubernetes-upgrade-20210813002240-679351 has defined IP address 192.168.50.136 and MAC address 52:54:00:60:d0:ea in network mk-kubernetes-upgrade-20210813002240-679351
	I0813 00:23:48.061168  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHPort
	I0813 00:23:48.061333  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHKeyPath
	I0813 00:23:48.061483  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .GetSSHUsername
	I0813 00:23:48.061627  716004 sshutil.go:53] new ssh client: &{IP:192.168.50.136 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/kubernetes-upgrade-20210813002240-679351/id_rsa Username:docker}
	I0813 00:23:48.123399  716004 ssh_runner.go:149] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.14.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.50.1 host.minikube.internal\n           fallthrough\n        }' | sudo /var/lib/minikube/binaries/v1.14.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
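The pipeline above rewrites the coredns ConfigMap in place: sed inserts a hosts block ahead of the existing forward directive, so host.minikube.internal resolves to the host-side gateway IP. After the replace, the relevant Corefile section should read roughly:

	hosts {
	   192.168.50.1 host.minikube.internal
	   fallthrough
	}
	forward . /etc/resolv.conf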
	I0813 00:23:48.124657  716004 kapi.go:59] client config for kubernetes-upgrade-20210813002240-679351: &rest.Config{Host:"https://192.168.50.136:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kubernetes-upgrade-20210813002240-679351/client.crt", KeyFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kubernetes-upgrade-20210813002240-679351/client.key", CAFile:"/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x17e2a80), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0813 00:23:48.126985  716004 api_server.go:50] waiting for apiserver process to appear ...
	I0813 00:23:48.127046  716004 ssh_runner.go:149] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0813 00:23:48.139267  716004 ssh_runner.go:149] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.14.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0813 00:23:48.184291  716004 ssh_runner.go:149] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.14.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0813 00:23:48.712890  716004 start.go:736] {"host.minikube.internal": 192.168.50.1} host record injected into CoreDNS
	I0813 00:23:48.712931  716004 api_server.go:70] duration metric: took 721.940951ms to wait for apiserver process to appear ...
	I0813 00:23:48.712951  716004 api_server.go:86] waiting for apiserver healthz status ...
	I0813 00:23:48.712963  716004 api_server.go:239] Checking apiserver healthz at https://192.168.50.136:8443/healthz ...
	I0813 00:23:48.725603  716004 api_server.go:265] https://192.168.50.136:8443/healthz returned 200:
	ok
	I0813 00:23:48.726662  716004 api_server.go:139] control plane version: v1.14.0
	I0813 00:23:48.726686  716004 api_server.go:129] duration metric: took 13.728785ms to wait for apiserver health ...
	I0813 00:23:48.726696  716004 system_pods.go:43] waiting for kube-system pods to appear ...
	I0813 00:23:48.737220  716004 system_pods.go:59] 0 kube-system pods found
	I0813 00:23:48.737248  716004 retry.go:31] will retry after 305.063636ms: only 0 pod(s) have shown up
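retry.go waits between attempts with delays that grow from attempt to attempt (305ms, 338ms, 378ms, 469ms, ... in this run). A hedged sketch of the pattern, not minikube's actual retry implementation:

	package main

	import (
		"context"
		"errors"
		"time"
	)

	// retryWithBackoff re-runs fn until it succeeds or ctx is cancelled,
	// sleeping a little longer after each failure.
	func retryWithBackoff(ctx context.Context, fn func() error) error {
		delay := 300 * time.Millisecond
		for {
			err := fn()
			if err == nil {
				return nil
			}
			select {
			case <-ctx.Done():
				return err
			case <-time.After(delay):
			}
			delay = delay * 3 / 2 // roughly the growth visible in the log
		}
	}

	func main() {
		attempts := 0
		_ = retryWithBackoff(context.Background(), func() error {
			attempts++
			if attempts < 3 {
				return errors.New("only 0 pod(s) have shown up") // stand-in error
			}
			return nil
		})
	}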
	I0813 00:23:48.814052  716004 main.go:130] libmachine: Making call to close driver server
	I0813 00:23:48.814093  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .Close
	I0813 00:23:48.814102  716004 main.go:130] libmachine: Making call to close driver server
	I0813 00:23:48.814123  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .Close
	I0813 00:23:48.814419  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | Closing plugin on server side
	I0813 00:23:48.814457  716004 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:23:48.814473  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | Closing plugin on server side
	I0813 00:23:48.814478  716004 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:23:48.814478  716004 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:23:48.814491  716004 main.go:130] libmachine: Making call to close driver server
	I0813 00:23:48.814502  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .Close
	I0813 00:23:48.814502  716004 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:23:48.814569  716004 main.go:130] libmachine: Making call to close driver server
	I0813 00:23:48.814578  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .Close
	I0813 00:23:48.814771  716004 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:23:48.814788  716004 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:23:48.816052  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) DBG | Closing plugin on server side
	I0813 00:23:48.816067  716004 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:23:48.816101  716004 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:23:48.816129  716004 main.go:130] libmachine: Making call to close driver server
	I0813 00:23:48.816150  716004 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .Close
	I0813 00:23:48.816368  716004 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:23:48.816382  716004 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:23:44.696016  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:47.195507  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:49.203255  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:48.818579  716004 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0813 00:23:48.818606  716004 addons.go:344] enableAddons completed in 827.536284ms
	I0813 00:23:49.047612  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:49.047654  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:49.047670  716004 retry.go:31] will retry after 338.212508ms: only 1 pod(s) have shown up
	I0813 00:23:49.390176  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:49.390210  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:49.390224  716004 retry.go:31] will retry after 378.459802ms: only 1 pod(s) have shown up
	I0813 00:23:49.774922  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:49.774952  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:49.774966  716004 retry.go:31] will retry after 469.882201ms: only 1 pod(s) have shown up
	I0813 00:23:50.249464  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:50.249502  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:50.249536  716004 retry.go:31] will retry after 667.365439ms: only 1 pod(s) have shown up
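storage-provisioner stays Pending here because the single node still carries a taint the pod does not tolerate; the log does not name it, but during an upgrade window like this it is typically node.kubernetes.io/not-ready while the kubelet finishes coming up. A short client-go check to see which taints are blocking scheduling (kubeconfig path taken from the Run lines above):

	package main

	import (
		"context"
		"fmt"

		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			panic(err)
		}
		for _, n := range nodes.Items {
			for _, t := range n.Spec.Taints {
				fmt.Printf("%s: %s=%s:%s\n", n.Name, t.Key, t.Value, t.Effect)
			}
		}
	}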
	I0813 00:23:51.040492  716767 start.go:317] acquired machines lock for "auto-20210813002105-679351" in 7.440857589s
	I0813 00:23:51.040557  716767 start.go:89] Provisioning new machine with config: &{Name:auto-20210813002105-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2048 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:auto-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:5m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0} &{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}
	I0813 00:23:51.040704  716767 start.go:126] createHost starting for "" (driver="kvm2")
	I0813 00:23:51.044070  716767 out.go:204] * Creating kvm2 VM (CPUs=2, Memory=2048MB, Disk=20000MB) ...
	I0813 00:23:51.044287  716767 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:23:51.044349  716767 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:23:51.058778  716767 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:43465
	I0813 00:23:51.059220  716767 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:23:51.059811  716767 main.go:130] libmachine: Using API Version  1
	I0813 00:23:51.059836  716767 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:23:51.060332  716767 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:23:51.060537  716767 main.go:130] libmachine: (auto-20210813002105-679351) Calling .GetMachineName
	I0813 00:23:51.060720  716767 main.go:130] libmachine: (auto-20210813002105-679351) Calling .DriverName
	I0813 00:23:51.060904  716767 start.go:160] libmachine.API.Create for "auto-20210813002105-679351" (driver="kvm2")
	I0813 00:23:51.060935  716767 client.go:168] LocalClient.Create starting
	I0813 00:23:51.060976  716767 main.go:130] libmachine: Reading certificate data from /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem
	I0813 00:23:51.061043  716767 main.go:130] libmachine: Decoding PEM data...
	I0813 00:23:51.061067  716767 main.go:130] libmachine: Parsing certificate...
	I0813 00:23:51.061228  716767 main.go:130] libmachine: Reading certificate data from /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/cert.pem
	I0813 00:23:51.061251  716767 main.go:130] libmachine: Decoding PEM data...
	I0813 00:23:51.061271  716767 main.go:130] libmachine: Parsing certificate...
	I0813 00:23:51.061330  716767 main.go:130] libmachine: Running pre-create checks...
	I0813 00:23:51.061345  716767 main.go:130] libmachine: (auto-20210813002105-679351) Calling .PreCreateCheck
	I0813 00:23:51.061718  716767 main.go:130] libmachine: (auto-20210813002105-679351) Calling .GetConfigRaw
	I0813 00:23:51.062177  716767 main.go:130] libmachine: Creating machine...
	I0813 00:23:51.062193  716767 main.go:130] libmachine: (auto-20210813002105-679351) Calling .Create
	I0813 00:23:51.062334  716767 main.go:130] libmachine: (auto-20210813002105-679351) Creating KVM machine...
	I0813 00:23:51.065144  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | found existing default KVM network
	I0813 00:23:51.067403  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.067189  716910 network.go:240] skipping subnet 192.168.39.0/24 that is taken: &{IP:192.168.39.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.39.0/24 Gateway:192.168.39.1 ClientMin:192.168.39.2 ClientMax:192.168.39.254 Broadcast:192.168.39.255 Interface:{IfaceName:virbr6 IfaceIPv4:192.168.39.1 IfaceMTU:1500 IfaceMAC:52:54:00:c9:12:3a}}
	I0813 00:23:51.069702  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.069605  716910 network.go:240] skipping subnet 192.168.50.0/24 that is taken: &{IP:192.168.50.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.50.0/24 Gateway:192.168.50.1 ClientMin:192.168.50.2 ClientMax:192.168.50.254 Broadcast:192.168.50.255 Interface:{IfaceName:virbr5 IfaceIPv4:192.168.50.1 IfaceMTU:1500 IfaceMAC:52:54:00:e7:55:97}}
	I0813 00:23:51.071022  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.070925  716910 network.go:240] skipping subnet 192.168.61.0/24 that is taken: &{IP:192.168.61.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.61.0/24 Gateway:192.168.61.1 ClientMin:192.168.61.2 ClientMax:192.168.61.254 Broadcast:192.168.61.255 Interface:{IfaceName:virbr3 IfaceIPv4:192.168.61.1 IfaceMTU:1500 IfaceMAC:52:54:00:ef:5a:89}}
	I0813 00:23:51.074043  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.073926  716910 network.go:240] skipping subnet 192.168.72.0/24 that is taken: &{IP:192.168.72.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.72.0/24 Gateway:192.168.72.1 ClientMin:192.168.72.2 ClientMax:192.168.72.254 Broadcast:192.168.72.255 Interface:{IfaceName:virbr4 IfaceIPv4:192.168.72.1 IfaceMTU:1500 IfaceMAC:52:54:00:3b:67:48}}
	I0813 00:23:51.075430  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.075356  716910 network.go:240] skipping subnet 192.168.83.0/24 that is taken: &{IP:192.168.83.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.83.0/24 Gateway:192.168.83.1 ClientMin:192.168.83.2 ClientMax:192.168.83.254 Broadcast:192.168.83.255 Interface:{IfaceName:virbr9 IfaceIPv4:192.168.83.1 IfaceMTU:1500 IfaceMAC:52:54:00:2a:df:7d}}
	I0813 00:23:51.077354  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.077276  716910 network.go:288] reserving subnet 192.168.94.0 for 1m0s: &{mu:{state:0 sema:0} read:{v:{m:map[] amended:true}} dirty:map[192.168.94.0:0xc00018c018] misses:0}
	I0813 00:23:51.077382  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.077313  716910 network.go:235] using free private subnet 192.168.94.0/24: &{IP:192.168.94.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.94.0/24 Gateway:192.168.94.1 ClientMin:192.168.94.2 ClientMax:192.168.94.254 Broadcast:192.168.94.255 Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:}}
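The scan above walks candidate private /24s (the third octet steps 39, 50, 61, 72, 83, 94) and reserves the first subnet no local interface already occupies. A simplified sketch of that selection (the start and step are read off this log, not taken from minikube's network.go):

	package main

	import (
		"fmt"
		"net"
	)

	func main() {
		// Collect the CIDRs already claimed by local interfaces (virbr3, virbr5, ... above).
		taken := map[string]bool{}
		ifaces, _ := net.Interfaces()
		for _, ifc := range ifaces {
			addrs, _ := ifc.Addrs()
			for _, a := range addrs {
				if _, cidr, err := net.ParseCIDR(a.String()); err == nil {
					taken[cidr.String()] = true
				}
			}
		}
		for third := 39; third <= 254; third += 11 {
			cidr := fmt.Sprintf("192.168.%d.0/24", third)
			if !taken[cidr] {
				fmt.Println("using free private subnet", cidr)
				return
			}
		}
		fmt.Println("no free private subnet found")
	}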
	I0813 00:23:51.100332  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | trying to create private KVM network mk-auto-20210813002105-679351 192.168.94.0/24...
	I0813 00:23:51.334015  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | private KVM network mk-auto-20210813002105-679351 192.168.94.0/24 created
	I0813 00:23:51.334080  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting up store path in /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351 ...
	I0813 00:23:51.334108  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.333962  716910 common.go:108] Making disk image using store path: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:23:51.334134  716767 main.go:130] libmachine: (auto-20210813002105-679351) Building disk image from file:///home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/iso/minikube-v1.22.0-1628238775-12122.iso
	I0813 00:23:51.334270  716767 main.go:130] libmachine: (auto-20210813002105-679351) Downloading /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/boot2docker.iso from file:///home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/iso/minikube-v1.22.0-1628238775-12122.iso...
	I0813 00:23:51.534972  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.534844  716910 common.go:115] Creating ssh key: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351/id_rsa...
	I0813 00:23:51.606821  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.606691  716910 common.go:121] Creating raw disk image: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351/auto-20210813002105-679351.rawdisk...
	I0813 00:23:51.606866  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Writing magic tar header
	I0813 00:23:51.606918  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Writing SSH key tar header
	I0813 00:23:51.606957  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:51.606813  716910 common.go:135] Fixing permissions on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351 ...
	I0813 00:23:51.606992  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351
	I0813 00:23:51.607009  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/auto-20210813002105-679351 (perms=drwx------)
	I0813 00:23:51.607020  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines
	I0813 00:23:51.607036  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines (perms=drwxr-xr-x)
	I0813 00:23:51.607046  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:23:51.607066  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b
	I0813 00:23:51.607076  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration
	I0813 00:23:51.607088  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins
	I0813 00:23:51.607096  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Checking permissions on dir: /home
	I0813 00:23:51.607104  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | Skipping /home - not owner
	I0813 00:23:51.607124  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube (perms=drwxr-xr-x)
	I0813 00:23:51.607148  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b (perms=drwxr-xr-x)
	I0813 00:23:51.607174  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration (perms=drwxr-xr-x)
	I0813 00:23:51.607181  716767 main.go:130] libmachine: (auto-20210813002105-679351) Setting executable bit set on /home/jenkins (perms=drwxr-xr-x)
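"Fixing permissions" here walks from the machine directory up toward /, adding the search (execute) bit to every directory the CI user owns and skipping the rest, which is why /home is passed over. A Linux-only sketch of that walk (the start path is a placeholder):

	package main

	import (
		"os"
		"path/filepath"
		"syscall"
	)

	func main() {
		start := "/home/jenkins/.minikube/machines/example" // placeholder path
		for dir := start; dir != "/"; dir = filepath.Dir(dir) {
			info, err := os.Stat(dir)
			if err != nil {
				break
			}
			st, ok := info.Sys().(*syscall.Stat_t)
			if !ok || int(st.Uid) != os.Getuid() {
				continue // not the owner: skip, as with "Skipping /home - not owner" above
			}
			// Add the traversal bit so libvirt can reach the disk image under this tree.
			_ = os.Chmod(dir, info.Mode()|0o111)
		}
	}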
	I0813 00:23:51.607194  716767 main.go:130] libmachine: (auto-20210813002105-679351) Creating domain...
	I0813 00:23:51.634568  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:d0:d0:55 in network default
	I0813 00:23:51.635205  716767 main.go:130] libmachine: (auto-20210813002105-679351) Ensuring networks are active...
	I0813 00:23:51.635226  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:51.637544  716767 main.go:130] libmachine: (auto-20210813002105-679351) Ensuring network default is active
	I0813 00:23:51.637899  716767 main.go:130] libmachine: (auto-20210813002105-679351) Ensuring network mk-auto-20210813002105-679351 is active
	I0813 00:23:51.638587  716767 main.go:130] libmachine: (auto-20210813002105-679351) Getting domain xml...
	I0813 00:23:51.640716  716767 main.go:130] libmachine: (auto-20210813002105-679351) Creating domain...
	I0813 00:23:52.057654  716767 main.go:130] libmachine: (auto-20210813002105-679351) Waiting to get IP...
	I0813 00:23:52.058806  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:52.059316  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:52.059346  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:52.059258  716910 retry.go:31] will retry after 263.082536ms: waiting for machine to come up
	I0813 00:23:52.323639  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:52.324237  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:52.324335  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:52.324257  716910 retry.go:31] will retry after 381.329545ms: waiting for machine to come up
	I0813 00:23:52.706916  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:52.707357  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:52.707381  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:52.707310  716910 retry.go:31] will retry after 422.765636ms: waiting for machine to come up
	I0813 00:23:53.131665  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:53.132237  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:53.132270  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:53.132177  716910 retry.go:31] will retry after 473.074753ms: waiting for machine to come up
	I0813 00:23:51.700287  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:54.233827  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:50.920865  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:50.920912  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:50.920930  716004 retry.go:31] will retry after 597.243124ms: only 1 pod(s) have shown up
	I0813 00:23:51.523438  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:51.523482  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:51.523501  716004 retry.go:31] will retry after 789.889932ms: only 1 pod(s) have shown up
	I0813 00:23:52.318585  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:52.318621  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:52.318637  716004 retry.go:31] will retry after 951.868007ms: only 1 pod(s) have shown up
	I0813 00:23:53.275480  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:53.275516  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:53.275532  716004 retry.go:31] will retry after 1.341783893s: only 1 pod(s) have shown up
	I0813 00:23:54.621716  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:54.621756  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:54.621775  716004 retry.go:31] will retry after 1.876813009s: only 1 pod(s) have shown up
	I0813 00:23:53.606587  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:53.607105  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:53.607132  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:53.607062  716910 retry.go:31] will retry after 587.352751ms: waiting for machine to come up
	I0813 00:23:54.195821  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:54.196281  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:54.196305  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:54.196240  716910 retry.go:31] will retry after 834.206799ms: waiting for machine to come up
	I0813 00:23:55.031715  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:55.032162  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:55.032197  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:55.032086  716910 retry.go:31] will retry after 746.553905ms: waiting for machine to come up
	I0813 00:23:55.779981  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:55.780498  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:55.780523  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:55.780446  716910 retry.go:31] will retry after 987.362415ms: waiting for machine to come up
	I0813 00:23:56.769757  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:56.770266  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:56.770300  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:56.770221  716910 retry.go:31] will retry after 1.189835008s: waiting for machine to come up
	I0813 00:23:57.961367  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:57.962155  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:57.962187  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:57.962104  716910 retry.go:31] will retry after 1.677229867s: waiting for machine to come up
	I0813 00:23:56.700526  716165 pod_ready.go:102] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"False"
	I0813 00:23:59.203502  716165 pod_ready.go:92] pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"True"
	I0813 00:23:59.203528  716165 pod_ready.go:81] duration metric: took 23.531237761s waiting for pod "kube-controller-manager-pause-20210813001951-679351" in "kube-system" namespace to be "Ready" ...
	I0813 00:23:59.203548  716165 pod_ready.go:78] waiting up to 6m0s for pod "kube-proxy-2mkpr" in "kube-system" namespace to be "Ready" ...
	I0813 00:23:59.229946  716165 pod_ready.go:92] pod "kube-proxy-2mkpr" in "kube-system" namespace has status "Ready":"True"
	I0813 00:23:59.229972  716165 pod_ready.go:81] duration metric: took 26.415102ms waiting for pod "kube-proxy-2mkpr" in "kube-system" namespace to be "Ready" ...
	I0813 00:23:59.229988  716165 pod_ready.go:78] waiting up to 6m0s for pod "kube-scheduler-pause-20210813001951-679351" in "kube-system" namespace to be "Ready" ...
	I0813 00:23:59.239104  716165 pod_ready.go:92] pod "kube-scheduler-pause-20210813001951-679351" in "kube-system" namespace has status "Ready":"True"
	I0813 00:23:59.239127  716165 pod_ready.go:81] duration metric: took 9.130749ms waiting for pod "kube-scheduler-pause-20210813001951-679351" in "kube-system" namespace to be "Ready" ...
	I0813 00:23:59.239136  716165 pod_ready.go:38] duration metric: took 30.618753935s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
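pod_ready.go polls each system-critical pod until its PodReady condition reports True, within a per-pod budget (6m0s above). A hedged equivalent using client-go's polling helper, not minikube's actual code:

	package readiness

	import (
		"context"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
	)

	// WaitPodReady blocks until the named pod's Ready condition is True or the timeout expires.
	func WaitPodReady(cs *kubernetes.Clientset, ns, name string) error {
		return wait.PollImmediate(2*time.Second, 6*time.Minute, func() (bool, error) {
			pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
			if err != nil {
				return false, nil // transient API errors: keep polling
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
	}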
	I0813 00:23:59.239158  716165 api_server.go:50] waiting for apiserver process to appear ...
	I0813 00:23:59.239214  716165 ssh_runner.go:149] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0813 00:23:59.256452  716165 api_server.go:70] duration metric: took 30.826512028s to wait for apiserver process to appear ...
	I0813 00:23:59.256483  716165 api_server.go:86] waiting for apiserver healthz status ...
	I0813 00:23:59.256496  716165 api_server.go:239] Checking apiserver healthz at https://192.168.127.196:8443/healthz ...
	I0813 00:23:59.264912  716165 api_server.go:265] https://192.168.127.196:8443/healthz returned 200:
	ok
	I0813 00:23:59.268135  716165 api_server.go:139] control plane version: v1.21.3
	I0813 00:23:59.268164  716165 api_server.go:129] duration metric: took 11.67345ms to wait for apiserver health ...
	I0813 00:23:59.268175  716165 system_pods.go:43] waiting for kube-system pods to appear ...
	I0813 00:23:59.276357  716165 system_pods.go:59] 7 kube-system pods found
	I0813 00:23:59.276426  716165 system_pods.go:61] "coredns-558bd4d5db-xjmwl" [5897a243-0289-4042-882a-d25cb005813b] Running
	I0813 00:23:59.276442  716165 system_pods.go:61] "etcd-pause-20210813001951-679351" [b35637c3-36e1-4d4b-b134-d8ff45654f0b] Running
	I0813 00:23:59.276449  716165 system_pods.go:61] "kube-apiserver-pause-20210813001951-679351" [85ba09e9-c18c-45e3-b17e-91c22905e23d] Running
	I0813 00:23:59.276456  716165 system_pods.go:61] "kube-controller-manager-pause-20210813001951-679351" [cecc2dae-a7a9-4055-804e-1b2eef4a2618] Running
	I0813 00:23:59.276466  716165 system_pods.go:61] "kube-proxy-2mkpr" [59d9290e-34c7-4e80-a909-8d989552ec78] Running
	I0813 00:23:59.276473  716165 system_pods.go:61] "kube-scheduler-pause-20210813001951-679351" [6b2abce1-c0bf-4c30-b455-05b50f0431fc] Running
	I0813 00:23:59.276482  716165 system_pods.go:61] "storage-provisioner" [b781b362-9644-4c96-a463-4cb61bc5ab58] Running
	I0813 00:23:59.276489  716165 system_pods.go:74] duration metric: took 8.308397ms to wait for pod list to return data ...
	I0813 00:23:59.276506  716165 default_sa.go:34] waiting for default service account to be created ...
	I0813 00:23:59.285171  716165 default_sa.go:45] found service account: "default"
	I0813 00:23:59.285197  716165 default_sa.go:55] duration metric: took 8.683697ms for default service account to be created ...
	I0813 00:23:59.285206  716165 system_pods.go:116] waiting for k8s-apps to be running ...
	I0813 00:23:59.292591  716165 system_pods.go:86] 7 kube-system pods found
	I0813 00:23:59.292661  716165 system_pods.go:89] "coredns-558bd4d5db-xjmwl" [5897a243-0289-4042-882a-d25cb005813b] Running
	I0813 00:23:59.292679  716165 system_pods.go:89] "etcd-pause-20210813001951-679351" [b35637c3-36e1-4d4b-b134-d8ff45654f0b] Running
	I0813 00:23:59.292686  716165 system_pods.go:89] "kube-apiserver-pause-20210813001951-679351" [85ba09e9-c18c-45e3-b17e-91c22905e23d] Running
	I0813 00:23:59.292693  716165 system_pods.go:89] "kube-controller-manager-pause-20210813001951-679351" [cecc2dae-a7a9-4055-804e-1b2eef4a2618] Running
	I0813 00:23:59.292699  716165 system_pods.go:89] "kube-proxy-2mkpr" [59d9290e-34c7-4e80-a909-8d989552ec78] Running
	I0813 00:23:59.292705  716165 system_pods.go:89] "kube-scheduler-pause-20210813001951-679351" [6b2abce1-c0bf-4c30-b455-05b50f0431fc] Running
	I0813 00:23:59.292710  716165 system_pods.go:89] "storage-provisioner" [b781b362-9644-4c96-a463-4cb61bc5ab58] Running
	I0813 00:23:59.292718  716165 system_pods.go:126] duration metric: took 7.505768ms to wait for k8s-apps to be running ...
	I0813 00:23:59.292732  716165 system_svc.go:44] waiting for kubelet service to be running ....
	I0813 00:23:59.292784  716165 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0813 00:23:59.314742  716165 system_svc.go:56] duration metric: took 21.999634ms WaitForService to wait for kubelet.
	I0813 00:23:59.314836  716165 kubeadm.go:547] duration metric: took 30.884900181s to wait for : map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] ...
	I0813 00:23:59.314907  716165 node_conditions.go:102] verifying NodePressure condition ...
	I0813 00:23:59.334947  716165 node_conditions.go:122] node storage ephemeral capacity is 17784752Ki
	I0813 00:23:59.334986  716165 node_conditions.go:123] node cpu capacity is 2
	I0813 00:23:59.335009  716165 node_conditions.go:105] duration metric: took 20.073824ms to run NodePressure ...
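The NodePressure check reads capacity straight off the node objects (17784752Ki of ephemeral storage and 2 CPUs here). Reading the same fields with client-go:

	package nodeinfo

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
	)

	// PrintCapacity lists each node's CPU and ephemeral-storage capacity,
	// the same fields the node_conditions lines above report.
	func PrintCapacity(cs *kubernetes.Clientset) error {
		nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			return err
		}
		for _, n := range nodes.Items {
			cpu := n.Status.Capacity[corev1.ResourceCPU]
			eph := n.Status.Capacity[corev1.ResourceEphemeralStorage]
			fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name, cpu.String(), eph.String())
		}
		return nil
	}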
	I0813 00:23:59.335022  716165 start.go:231] waiting for startup goroutines ...
	I0813 00:23:59.414520  716165 start.go:462] kubectl: 1.20.5, cluster: 1.21.3 (minor skew: 1)
	I0813 00:23:59.416641  716165 out.go:177] * Done! kubectl is now configured to use "pause-20210813001951-679351" cluster and "default" namespace by default
	I0813 00:23:56.504125  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:56.504172  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:56.504191  716004 retry.go:31] will retry after 2.6934314s: only 1 pod(s) have shown up
	I0813 00:23:59.214548  716004 system_pods.go:59] 1 kube-system pods found
	I0813 00:23:59.214588  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had taints that the pod didn't tolerate.)
	I0813 00:23:59.214606  716004 retry.go:31] will retry after 2.494582248s: only 1 pod(s) have shown up
	I0813 00:23:59.640905  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:23:59.641364  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:23:59.641402  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:23:59.641276  716910 retry.go:31] will retry after 2.346016261s: waiting for machine to come up
	I0813 00:24:01.989042  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:24:01.989580  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:24:01.989606  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:24:01.989539  716910 retry.go:31] will retry after 3.36678925s: waiting for machine to come up
	I0813 00:24:04.629229  716004 system_pods.go:59] 2 kube-system pods found
	I0813 00:24:04.629270  716004 system_pods.go:61] "coredns-fb8b8dccf-stsv6" [c23d810d-fbcc-11eb-bfc1-52540060d0ea] Pending
	I0813 00:24:04.629277  716004 system_pods.go:61] "storage-provisioner" [baa2edae-fbcc-11eb-bfc1-52540060d0ea] Pending
	I0813 00:24:04.629286  716004 system_pods.go:74] duration metric: took 15.902582355s to wait for pod list to return data ...
	I0813 00:24:04.629298  716004 kubeadm.go:547] duration metric: took 16.63831269s to wait for : map[apiserver:true system_pods:true] ...
	I0813 00:24:04.629314  716004 node_conditions.go:102] verifying NodePressure condition ...
	I0813 00:24:04.723771  716004 node_conditions.go:122] node storage ephemeral capacity is 17784752Ki
	I0813 00:24:04.723817  716004 node_conditions.go:123] node cpu capacity is 2
	I0813 00:24:04.723835  716004 node_conditions.go:105] duration metric: took 94.514701ms to run NodePressure ...
	I0813 00:24:04.723848  716004 start.go:231] waiting for startup goroutines ...
	I0813 00:24:04.803131  716004 start.go:462] kubectl: 1.20.5, cluster: 1.14.0 (minor skew: 6)
	I0813 00:24:04.805595  716004 out.go:177] 
	W0813 00:24:04.805823  716004 out.go:242] ! /usr/local/bin/kubectl is version 1.20.5, which may have incompatibilities with Kubernetes 1.14.0.
	I0813 00:24:04.807470  716004 out.go:177]   - Want kubectl v1.14.0? Try 'minikube kubectl -- get pods -A'
	I0813 00:24:04.809337  716004 out.go:177] * Done! kubectl is now configured to use "kubernetes-upgrade-20210813002240-679351" cluster and "default" namespace by default
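The "minor skew" figures are just the distance between the client's and the cluster's minor versions; kubectl officially supports a skew of one minor version, which is why 1.20.5 against 1.14.0 earns the warning above while 1.20.5 against 1.21.3 earlier did not. A back-of-the-envelope check:

	package main

	import (
		"fmt"
		"strconv"
		"strings"
	)

	// minor extracts the minor component of a "vX.Y.Z" or "X.Y.Z" version string.
	func minor(v string) int {
		parts := strings.Split(strings.TrimPrefix(v, "v"), ".")
		m, _ := strconv.Atoi(parts[1])
		return m
	}

	func main() {
		skew := minor("1.20.5") - minor("1.14.0")
		if skew < 0 {
			skew = -skew
		}
		fmt.Println("minor skew:", skew) // prints 6, matching the warning above
	}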
	I0813 00:24:05.357959  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | domain auto-20210813002105-679351 has defined MAC address 52:54:00:8e:56:53 in network mk-auto-20210813002105-679351
	I0813 00:24:05.358449  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | unable to find current IP address of domain auto-20210813002105-679351 in network mk-auto-20210813002105-679351
	I0813 00:24:05.358483  716767 main.go:130] libmachine: (auto-20210813002105-679351) DBG | I0813 00:24:05.358397  716910 retry.go:31] will retry after 3.11822781s: waiting for machine to come up
	
	* 
	* ==> container status <==
	* CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID
	a4a4ec1132e56       bc2bb319a7038       25 seconds ago      Running             kube-controller-manager   2                   a8ae77b235803
	7899222a89a71       6e38f40d628db       40 seconds ago      Running             storage-provisioner       0                   4a6e63928b11f
	d30696c5405d8       296a6d5035e2d       52 seconds ago      Running             coredns                   1                   f5e6ffa407fcd
	6204b21ab9c2c       adb2816ea823a       54 seconds ago      Running             kube-proxy                1                   46f36163c53e1
	705c524b7bd2d       0369cf4303ffd       54 seconds ago      Running             etcd                      1                   accc3895e9638
	64c935095bced       6be0dc1302e30       54 seconds ago      Running             kube-scheduler            1                   2c9091a6bd8d7
	85bd885bbae1e       3d174f00aa39e       55 seconds ago      Running             kube-apiserver            1                   bb4e7930b2ada
	9cb0b80b9734a       bc2bb319a7038       55 seconds ago      Exited              kube-controller-manager   1                   a8ae77b235803
	efb6b8992aa82       296a6d5035e2d       2 minutes ago       Exited              coredns                   0                   36cd319da9139
	c11b8a977685b       adb2816ea823a       2 minutes ago       Exited              kube-proxy                0                   7d0f0371d8768
	2efebee19d7a6       0369cf4303ffd       2 minutes ago       Exited              etcd                      0                   54509c8ec15d2
	3874ae5baf2d8       3d174f00aa39e       2 minutes ago       Exited              kube-apiserver            0                   c77d72f80a55b
	3afc8c09f8286       6be0dc1302e30       2 minutes ago       Exited              kube-scheduler            0                   1bd10dcb9a7ea
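This table is the CRI-level view of the node, most plausibly collected with something like:

	sudo crictl ps -a

crictl ps -a lists running and exited containers alike: the Exited rows are the control-plane containers from before the kubelet restart, and ATTEMPT counts restarts of the same container name within a pod sandbox (hence kube-controller-manager running at attempt 2 beside its exited attempt 1).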
	
	* 
	* ==> containerd <==
	* -- Logs begin at Fri 2021-08-13 00:20:51 UTC, end at Fri 2021-08-13 00:24:10 UTC. --
	Aug 13 00:23:18 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:18.421885649Z" level=info msg="StartContainer for \"6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b\" returns successfully"
	Aug 13 00:23:18 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:18.634895282Z" level=info msg="StartContainer for \"d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f\" returns successfully"
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.651258400Z" level=info msg="Finish piping stderr of container \"9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe\""
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.652374051Z" level=info msg="Finish piping stdout of container \"9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe\""
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.654645594Z" level=info msg="TaskExit event &TaskExit{ContainerID:9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe,ID:9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe,Pid:4197,ExitStatus:255,ExitedAt:2021-08-13 00:23:27.653997105 +0000 UTC,XXX_unrecognized:[],}"
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.765885334Z" level=info msg="shim disconnected" id=9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.766396530Z" level=error msg="copy shim log" error="read /proc/self/fd/58: file already closed"
	Aug 13 00:23:28 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:28.010584031Z" level=info msg="RemoveContainer for \"112c2918d72a90f4b0bbe9d6e1b3134149bf89a77f20de44d4059a1ad6edeff4\""
	Aug 13 00:23:28 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:28.021541263Z" level=info msg="RemoveContainer for \"112c2918d72a90f4b0bbe9d6e1b3134149bf89a77f20de44d4059a1ad6edeff4\" returns successfully"
	Aug 13 00:23:29 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:29.821371568Z" level=info msg="RunPodsandbox for &PodSandboxMetadata{Name:storage-provisioner,Uid:b781b362-9644-4c96-a463-4cb61bc5ab58,Namespace:kube-system,Attempt:0,}"
	Aug 13 00:23:29 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:29.875382976Z" level=info msg="starting signal loop" namespace=k8s.io path=/run/containerd/io.containerd.runtime.v2.task/k8s.io/4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565 pid=4636
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.445012017Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:storage-provisioner,Uid:b781b362-9644-4c96-a463-4cb61bc5ab58,Namespace:kube-system,Attempt:0,} returns sandbox id \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.455717251Z" level=info msg="CreateContainer within sandbox \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:0,}"
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.529886160Z" level=info msg="CreateContainer within sandbox \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\" for &ContainerMetadata{Name:storage-provisioner,Attempt:0,} returns container id \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.531938735Z" level=info msg="StartContainer for \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.785895579Z" level=info msg="StartContainer for \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\" returns successfully"
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.128731300Z" level=info msg="CreateContainer within sandbox \"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264\" for container &ContainerMetadata{Name:kube-controller-manager,Attempt:2,}"
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.216530954Z" level=info msg="CreateContainer within sandbox \"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264\" for &ContainerMetadata{Name:kube-controller-manager,Attempt:2,} returns container id \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\""
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.221615537Z" level=info msg="StartContainer for \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\""
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.473360047Z" level=info msg="StartContainer for \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\" returns successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024224370Z" level=info msg="StopPodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\""
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024332361Z" level=info msg="TearDown network for sandbox \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024344869Z" level=info msg="StopPodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" returns successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.026182269Z" level=info msg="RemovePodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\""
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.033764742Z" level=info msg="RemovePodSandbox \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" returns successfully"
	
	* 
	* ==> coredns [d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f] <==
	* [INFO] plugin/ready: Still waiting on: "kubernetes"
	.:53
	[INFO] plugin/reload: Running configuration MD5 = 21fa5447a9370c672668c17fadc8028a
	CoreDNS-1.8.0
	linux/amd64, go1.15.3, 054c9ae
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	* 
	* ==> coredns [efb6b8992aa826974c98985c6dbeb065b7a93d6ceeacacae98b06ec18bbfd5bb] <==
	* I0813 00:22:38.694893       1 trace.go:205] Trace[2019727887]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.692) (total time: 30002ms):
	Trace[2019727887]: [30.002199421s] [30.002199421s] END
	E0813 00:22:38.695325       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0813 00:22:38.695485       1 trace.go:205] Trace[911902081]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.694) (total time: 30001ms):
	Trace[911902081]: [30.001141967s] [30.001141967s] END
	E0813 00:22:38.695755       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0813 00:22:38.695034       1 trace.go:205] Trace[1427131847]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.694) (total time: 30000ms):
	Trace[1427131847]: [30.000422913s] [30.000422913s] END
	E0813 00:22:38.696466       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Endpoints: failed to list *v1.Endpoints: Get "https://10.96.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	.:53
	[INFO] plugin/reload: Running configuration MD5 = db32ca3650231d74073ff4cf814959a7
	CoreDNS-1.8.0
	linux/amd64, go1.15.3, 054c9ae
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] Reloading
	[INFO] plugin/health: Going into lameduck mode for 5s
	[INFO] plugin/reload: Running configuration MD5 = 21fa5447a9370c672668c17fadc8028a
	[INFO] Reloading complete
	
	* 
	* ==> describe nodes <==
	* Name:               pause-20210813001951-679351
	Roles:              control-plane,master
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=pause-20210813001951-679351
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=dc1c3ca26e9449ce488a773126b8450402c94a19
	                    minikube.k8s.io/name=pause-20210813001951-679351
	                    minikube.k8s.io/updated_at=2021_08_13T00_21_48_0700
	                    minikube.k8s.io/version=v1.22.0
	                    node-role.kubernetes.io/control-plane=
	                    node-role.kubernetes.io/master=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: /run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Fri, 13 Aug 2021 00:21:44 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  pause-20210813001951-679351
	  AcquireTime:     <unset>
	  RenewTime:       Fri, 13 Aug 2021 00:23:57 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:57 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.127.196
	  Hostname:    pause-20210813001951-679351
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17784752Ki
	  hugepages-2Mi:      0
	  memory:             2033024Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17784752Ki
	  hugepages-2Mi:      0
	  memory:             2033024Ki
	  pods:               110
	System Info:
	  Machine ID:                 9109403b29744bfb99029b25cc4f9da7
	  System UUID:                9109403b-2974-4bfb-9902-9b25cc4f9da7
	  Boot ID:                    0d1f634d-bf20-456d-8420-8e644eba3e38
	  Kernel Version:             4.19.182
	  OS Image:                   Buildroot 2020.02.12
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  containerd://1.4.9
	  Kubelet Version:            v1.21.3
	  Kube-Proxy Version:         v1.21.3
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (7 in total)
	  Namespace                   Name                                                   CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                                   ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-558bd4d5db-xjmwl                               100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     2m6s
	  kube-system                 etcd-pause-20210813001951-679351                       100m (5%)     0 (0%)      100Mi (5%)       0 (0%)         2m26s
	  kube-system                 kube-apiserver-pause-20210813001951-679351             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m17s
	  kube-system                 kube-controller-manager-pause-20210813001951-679351    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m17s
	  kube-system                 kube-proxy-2mkpr                                       0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m6s
	  kube-system                 kube-scheduler-pause-20210813001951-679351             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m17s
	  kube-system                 storage-provisioner                                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         42s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  0 (0%)
	  memory             170Mi (8%)  170Mi (8%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From        Message
	  ----    ------                   ----                   ----        -------
	  Normal  NodeHasSufficientMemory  2m39s (x6 over 2m39s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m39s (x6 over 2m39s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m39s (x5 over 2m39s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientPID
	  Normal  Starting                 2m18s                  kubelet     Starting kubelet.
	  Normal  NodeHasSufficientMemory  2m17s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m17s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m17s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  2m17s                  kubelet     Updated Node Allocatable limit across pods
	  Normal  NodeReady                2m14s                  kubelet     Node pause-20210813001951-679351 status is now: NodeReady
	  Normal  Starting                 2m3s                   kube-proxy  Starting kube-proxy.
	  Normal  Starting                 44s                    kube-proxy  Starting kube-proxy.
	
	* 
	* ==> dmesg <==
	*               on the kernel command line
	[  +0.000130] platform regulatory.0: Direct firmware load for regulatory.db failed with error -2
	[  +4.671269] systemd-fstab-generator[1160]: Ignoring "noauto" for root device
	[  +0.045892] systemd[1]: system-getty.slice: unit configures an IP firewall, but the local system does not support BPF/cgroup firewalling.
	[  +0.000003] systemd[1]: (This warning is only shown for the first unit using IP firewalling.)
	[  +1.192244] SELinux: unrecognized netlink message: protocol=0 nlmsg_type=106 sclass=netlink_route_socket pid=1717 comm=systemd-network
	[  +2.439853] NFSD: the nfsdcld client tracking upcall will be removed in 3.10. Please transition to using nfsdcltrack.
	[  +2.154553] vboxguest: loading out-of-tree module taints kernel.
	[  +0.006032] vboxguest: PCI device not found, probably running on physical hardware.
	[Aug13 00:21] systemd-fstab-generator[2094]: Ignoring "noauto" for root device
	[  +0.270933] systemd-fstab-generator[2126]: Ignoring "noauto" for root device
	[  +0.157086] systemd-fstab-generator[2141]: Ignoring "noauto" for root device
	[  +0.205044] systemd-fstab-generator[2172]: Ignoring "noauto" for root device
	[  +8.129227] systemd-fstab-generator[2377]: Ignoring "noauto" for root device
	[ +21.150444] systemd-fstab-generator[2810]: Ignoring "noauto" for root device
	[Aug13 00:22] kauditd_printk_skb: 38 callbacks suppressed
	[ +41.503227] kauditd_printk_skb: 65 callbacks suppressed
	[  +6.452067] NFSD: Unable to end grace period: -110
	[Aug13 00:23] systemd-fstab-generator[3489]: Ignoring "noauto" for root device
	[  +0.238108] systemd-fstab-generator[3502]: Ignoring "noauto" for root device
	[  +0.253277] systemd-fstab-generator[3527]: Ignoring "noauto" for root device
	[ +17.104028] kauditd_printk_skb: 29 callbacks suppressed
	[ +32.355407] systemd-fstab-generator[4902]: Ignoring "noauto" for root device
	[Aug13 00:24] systemd-fstab-generator[5070]: Ignoring "noauto" for root device
	[  +3.299751] systemd-fstab-generator[5099]: Ignoring "noauto" for root device
	
	* 
	* ==> etcd [2efebee19d7a6bd77fd1333dab2cc543c575e8c6babdae865b90a1cf0fa48744] <==
	* 2021-08-13 00:22:07.230197 W | etcdserver: read-only range request "key:\"/registry/endpointslices/kube-system/kube-dns-5fjzs\" " with result "range_response_count:1 size:1013" took too long (671.993662ms) to execute
	2021-08-13 00:22:07.230468 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/kube-system/coredns\" " with result "range_response_count:1 size:217" took too long (741.585441ms) to execute
	2021-08-13 00:22:14.207544 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:24.208238 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:34.210334 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:45.207896 W | etcdserver/api/etcdhttp: /health error; QGET failed etcdserver: request timed out (status code 503)
	2021-08-13 00:22:45.919806 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "error:context deadline exceeded" took too long (2.000045576s) to execute
	2021-08-13 00:22:47.101521 W | wal: sync duration of 4.051774181s, expected less than 1s
	2021-08-13 00:22:47.103383 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4746" took too long (3.677716425s) to execute
	2021-08-13 00:22:47.104500 W | etcdserver: read-only range request "key:\"/registry/namespaces/kube-system\" " with result "range_response_count:1 size:351" took too long (2.60570306s) to execute
	2021-08-13 00:22:47.104825 W | etcdserver: read-only range request "key:\"/registry/namespaces/default\" " with result "range_response_count:1 size:341" took too long (894.799494ms) to execute
	2021-08-13 00:22:47.105989 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4746" took too long (1.568667767s) to execute
	2021-08-13 00:22:47.106790 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (1.176235696s) to execute
	2021-08-13 00:22:47.107462 W | etcdserver: read-only range request "key:\"/registry/flowschemas/exempt\" " with result "range_response_count:1 size:879" took too long (2.454270208s) to execute
	2021-08-13 00:22:47.950458 W | etcdserver: request "header:<ID:12242045188531646344 username:\"kube-apiserver-etcd-client\" auth_revision:1 > lease_grant:<ttl:3660-second id:29e47b3ce2b56f87>" with result "size:41" took too long (470.318775ms) to execute
	2021-08-13 00:22:47.951739 W | etcdserver: read-only range request "key:\"/registry/namespaces/kube-node-lease\" " with result "range_response_count:1 size:363" took too long (801.202969ms) to execute
	2021-08-13 00:22:47.955016 W | etcdserver: read-only range request "key:\"/registry/prioritylevelconfigurations/exempt\" " with result "range_response_count:1 size:371" took too long (804.019176ms) to execute
	2021-08-13 00:22:47.956311 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4568" took too long (530.617681ms) to execute
	2021-08-13 00:22:47.958454 W | etcdserver: read-only range request "key:\"/registry/priorityclasses/\" range_end:\"/registry/priorityclasses0\" count_only:true " with result "range_response_count:0 size:7" took too long (309.016563ms) to execute
	2021-08-13 00:22:48.801008 W | etcdserver: read-only range request "key:\"/registry/minions/pause-20210813001951-679351\" " with result "range_response_count:1 size:4776" took too long (768.144463ms) to execute
	2021-08-13 00:22:48.802711 W | etcdserver: read-only range request "key:\"/registry/controllers/\" range_end:\"/registry/controllers0\" count_only:true " with result "range_response_count:0 size:5" took too long (347.906261ms) to execute
	2021-08-13 00:22:49.019606 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/default/\" range_end:\"/registry/serviceaccounts/default0\" " with result "range_response_count:1 size:209" took too long (111.551006ms) to execute
	2021-08-13 00:22:49.020521 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (100.648686ms) to execute
	2021-08-13 00:22:54.217451 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:04.208424 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	
	* 
	* ==> etcd [705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b] <==
	* 2021-08-13 00:23:25.998767 W | etcdserver: read-only range request "key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" " with result "range_response_count:1 size:671" took too long (1.83123856s) to execute
	2021-08-13 00:23:25.999024 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/kube-apiserver-pause-20210813001951-679351\" " with result "range_response_count:1 size:6537" took too long (1.83134851s) to execute
	2021-08-13 00:23:25.999710 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/default/kubernetes\" " with result "range_response_count:1 size:423" took too long (1.836183835s) to execute
	2021-08-13 00:23:26.000315 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/kube-system/kube-proxy\" " with result "range_response_count:1 size:226" took too long (1.856850168s) to execute
	2021-08-13 00:23:26.003298 W | etcdserver: read-only range request "key:\"/registry/prioritylevelconfigurations/exempt\" " with result "range_response_count:1 size:371" took too long (1.862496869s) to execute
	2021-08-13 00:23:26.866787 W | etcdserver: read-only range request "key:\"/registry/clusterrolebindings/\" range_end:\"/registry/clusterrolebindings0\" " with result "range_response_count:50 size:37065" took too long (849.021882ms) to execute
	2021-08-13 00:23:26.868169 W | etcdserver: request "header:<ID:12242045188557790365 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" mod_revision:486 > success:<request_put:<key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" value_size:587 >> failure:<request_range:<key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" > >>" with result "size:16" took too long (533.569848ms) to execute
	2021-08-13 00:23:26.875022 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (380.351682ms) to execute
	2021-08-13 00:23:26.888255 W | etcdserver: read-only range request "key:\"/registry/flowschemas/exempt\" " with result "range_response_count:1 size:879" took too long (866.035216ms) to execute
	2021-08-13 00:23:26.891763 W | etcdserver: read-only range request "key:\"/registry/minions/pause-20210813001951-679351\" " with result "range_response_count:1 size:4776" took too long (869.868576ms) to execute
	2021-08-13 00:23:26.892452 W | etcdserver: read-only range request "key:\"/registry/masterleases/\" range_end:\"/registry/masterleases0\" " with result "range_response_count:0 size:5" took too long (870.451036ms) to execute
	2021-08-13 00:23:26.892806 W | etcdserver: read-only range request "key:\"/registry/priorityclasses/system-cluster-critical\" " with result "range_response_count:1 size:476" took too long (870.772194ms) to execute
	2021-08-13 00:23:26.893782 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (353.420112ms) to execute
	2021-08-13 00:23:27.618510 W | etcdserver: read-only range request "key:\"/registry/events/kube-system/kube-proxy-2mkpr.169ab5d1b9f0872d\" " with result "range_response_count:1 size:826" took too long (312.125535ms) to execute
	2021-08-13 00:23:27.618804 W | etcdserver: read-only range request "key:\"/registry/clusterroles/system:controller:certificate-controller\" " with result "range_response_count:1 size:1142" took too long (324.375348ms) to execute
	2021-08-13 00:23:27.619230 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (251.508486ms) to execute
	2021-08-13 00:23:35.057151 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:44.208010 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:54.207487 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:24:03.753011 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" " with result "range_response_count:1 size:1125" took too long (341.385574ms) to execute
	WARNING: 2021/08/13 00:24:07 grpc: Server.processUnaryRPC failed to write status: connection error: desc = "transport is closing"
	2021-08-13 00:24:09.122960 W | wal: sync duration of 3.322047764s, expected less than 1s
	2021-08-13 00:24:09.124505 W | etcdserver: read-only range request "key:\"/registry/namespaces/default\" " with result "range_response_count:1 size:341" took too long (1.247717726s) to execute
	2021-08-13 00:24:09.125866 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" " with result "range_response_count:1 size:1125" took too long (1.315223316s) to execute
	2021-08-13 00:24:09.686753 W | etcdserver: request "header:<ID:12242045188557790983 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/masterleases/192.168.127.196\" mod_revision:580 > success:<request_put:<key:\"/registry/masterleases/192.168.127.196\" value_size:70 lease:3018673151703015173 >> failure:<request_range:<key:\"/registry/masterleases/192.168.127.196\" > >>" with result "size:16" took too long (348.846917ms) to execute
	
	* 
	* ==> kernel <==
	*  00:24:11 up 3 min,  0 users,  load average: 1.77, 0.97, 0.39
	Linux pause-20210813001951-679351 4.19.182 #1 SMP Fri Aug 6 09:11:32 UTC 2021 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2020.02.12"
	
	* 
	* ==> kube-apiserver [3874ae5baf2d856570fa5534f52778b464323afbd92eca54de5a983517dbbb65] <==
	* Trace[265158055]: [552.827013ms] [552.827013ms] END
	I0813 00:22:48.803967       1 trace.go:205] Trace[40150974]: "GuaranteedUpdate etcd3" type:*apps.Deployment (13-Aug-2021 00:22:48.035) (total time: 768ms):
	Trace[40150974]: ---"Transaction committed" 766ms (00:22:00.803)
	Trace[40150974]: [768.574416ms] [768.574416ms] END
	I0813 00:22:48.805449       1 trace.go:205] Trace[38850226]: "Update" url:/apis/apps/v1/namespaces/kube-system/deployments/coredns/status,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:deployment-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.034) (total time: 770ms):
	Trace[38850226]: ---"Object stored in database" 769ms (00:22:00.805)
	Trace[38850226]: [770.627081ms] [770.627081ms] END
	I0813 00:22:48.807911       1 trace.go:205] Trace[1153692894]: "GuaranteedUpdate etcd3" type:*discovery.EndpointSlice (13-Aug-2021 00:22:48.029) (total time: 777ms):
	Trace[1153692894]: ---"Transaction committed" 776ms (00:22:00.807)
	Trace[1153692894]: [777.942281ms] [777.942281ms] END
	I0813 00:22:48.811870       1 trace.go:205] Trace[2055050687]: "Get" url:/api/v1/nodes/pause-20210813001951-679351,user-agent:minikube-linux-amd64/v0.0.0 (linux/amd64) kubernetes/$Format,client:192.168.127.1,accept:application/json, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.030) (total time: 780ms):
	Trace[2055050687]: ---"About to write a response" 779ms (00:22:00.810)
	Trace[2055050687]: [780.446725ms] [780.446725ms] END
	I0813 00:22:48.815873       1 trace.go:205] Trace[1709682153]: "Update" url:/apis/discovery.k8s.io/v1/namespaces/kube-system/endpointslices/kube-dns-5fjzs,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:endpointslice-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.029) (total time: 786ms):
	Trace[1709682153]: ---"Object stored in database" 785ms (00:22:00.815)
	Trace[1709682153]: [786.028644ms] [786.028644ms] END
	I0813 00:22:48.812641       1 trace.go:205] Trace[1460083222]: "GuaranteedUpdate etcd3" type:*core.Endpoints (13-Aug-2021 00:22:48.029) (total time: 783ms):
	Trace[1460083222]: ---"Transaction committed" 782ms (00:22:00.812)
	Trace[1460083222]: [783.305236ms] [783.305236ms] END
	I0813 00:22:48.819982       1 trace.go:205] Trace[920420538]: "Update" url:/api/v1/namespaces/kube-system/endpoints/kube-dns,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:endpoint-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.029) (total time: 790ms):
	Trace[920420538]: ---"Object stored in database" 790ms (00:22:00.819)
	Trace[920420538]: [790.783982ms] [790.783982ms] END
	I0813 00:23:04.050873       1 client.go:360] parsed scheme: "passthrough"
	I0813 00:23:04.051445       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0813 00:23:04.051754       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	
	* 
	* ==> kube-apiserver [85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7] <==
	* I0813 00:23:26.905285       1 storage_scheduling.go:148] all system priority classes are created successfully or already exist.
	I0813 00:23:29.331854       1 controller.go:611] quota admission added evaluator for: serviceaccounts
	I0813 00:23:29.366533       1 controller.go:611] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0813 00:23:29.384370       1 controller.go:611] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0813 00:23:29.403160       1 controller.go:611] quota admission added evaluator for: endpoints
	I0813 00:23:29.518332       1 controller.go:611] quota admission added evaluator for: events.events.k8s.io
	I0813 00:23:57.987189       1 controller.go:611] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0813 00:24:01.430291       1 client.go:360] parsed scheme: "passthrough"
	I0813 00:24:01.430491       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0813 00:24:01.430526       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	E0813 00:24:07.259737       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"context canceled"}: context canceled
	E0813 00:24:07.259860       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"client disconnected"}: client disconnected
	E0813 00:24:07.263006       1 writers.go:117] apiserver was unable to write a JSON response: http: Handler timeout
	E0813 00:24:07.263568       1 wrap.go:54] timeout or abort while handling: GET "/apis/storage.k8s.io/v1/csinodes/pause-20210813001951-679351"
	E0813 00:24:07.265357       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"http: Handler timeout"}: http: Handler timeout
	E0813 00:24:07.268867       1 writers.go:130] apiserver was unable to write a fallback JSON response: http: Handler timeout
	I0813 00:24:09.127818       1 trace.go:205] Trace[986017853]: "Get" url:/api/v1/namespaces/default,user-agent:kube-apiserver/v1.21.3 (linux/amd64) kubernetes/ca643a4,client:127.0.0.1,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:24:07.874) (total time: 1253ms):
	Trace[986017853]: ---"About to write a response" 1253ms (00:24:00.127)
	Trace[986017853]: [1.253446276s] [1.253446276s] END
	I0813 00:24:09.129569       1 trace.go:205] Trace[1940889522]: "Get" url:/api/v1/namespaces/kube-system/endpoints/k8s.io-minikube-hostpath,user-agent:storage-provisioner/v0.0.0 (linux/amd64) kubernetes/$Format,client:192.168.127.196,accept:application/json, */*,protocol:HTTP/2.0 (13-Aug-2021 00:24:07.809) (total time: 1319ms):
	Trace[1940889522]: ---"About to write a response" 1319ms (00:24:00.129)
	Trace[1940889522]: [1.319757496s] [1.319757496s] END
	I0813 00:24:09.688489       1 trace.go:205] Trace[1106331583]: "GuaranteedUpdate etcd3" type:*v1.Endpoints (13-Aug-2021 00:24:09.160) (total time: 527ms):
	Trace[1106331583]: ---"Transaction committed" 523ms (00:24:00.688)
	Trace[1106331583]: [527.566261ms] [527.566261ms] END
	
	* 
	* ==> kube-controller-manager [9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe] <==
	* 	/usr/local/go/src/bytes/buffer.go:204 +0xbe
	crypto/tls.(*Conn).readFromUntil(0xc000126e00, 0x500dda0, 0xc0008fa500, 0x5, 0xc0008fa500, 0x431)
		/usr/local/go/src/crypto/tls/conn.go:798 +0xf3
	crypto/tls.(*Conn).readRecordOrCCS(0xc000126e00, 0x0, 0x0, 0x0)
		/usr/local/go/src/crypto/tls/conn.go:605 +0x115
	crypto/tls.(*Conn).readRecord(...)
		/usr/local/go/src/crypto/tls/conn.go:573
	crypto/tls.(*Conn).Read(0xc000126e00, 0xc000be3000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
		/usr/local/go/src/crypto/tls/conn.go:1276 +0x165
	bufio.(*Reader).Read(0xc0001c8180, 0xc0002631b8, 0x9, 0x9, 0x9b69eb, 0xc001059c78, 0x4071a5)
		/usr/local/go/src/bufio/bufio.go:227 +0x222
	io.ReadAtLeast(0x5007a00, 0xc0001c8180, 0xc0002631b8, 0x9, 0x9, 0x9, 0xc000c0fec0, 0x4c5c995259c000, 0xc000c0fec0)
		/usr/local/go/src/io/io.go:328 +0x87
	io.ReadFull(...)
		/usr/local/go/src/io/io.go:347
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.readFrameHeader(0xc0002631b8, 0x9, 0x9, 0x5007a00, 0xc0001c8180, 0x0, 0x0, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:237 +0x89
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc000263180, 0xc00107d3b0, 0x0, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:492 +0xa5
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc001059fa8, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1819 +0xd8
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc000001200)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1741 +0x6f
	created by k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Transport).newClientConn
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:705 +0x6c5
	
	* 
	* ==> kube-controller-manager [a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d] <==
	* I0813 00:23:57.943260       1 shared_informer.go:247] Caches are synced for ReplicaSet 
	I0813 00:23:57.947953       1 shared_informer.go:247] Caches are synced for deployment 
	I0813 00:23:57.955167       1 shared_informer.go:247] Caches are synced for namespace 
	I0813 00:23:57.955304       1 shared_informer.go:247] Caches are synced for TTL 
	I0813 00:23:57.957499       1 shared_informer.go:247] Caches are synced for cronjob 
	I0813 00:23:57.961016       1 shared_informer.go:240] Waiting for caches to sync for garbage collector
	I0813 00:23:57.962629       1 shared_informer.go:247] Caches are synced for HPA 
	I0813 00:23:57.965129       1 shared_informer.go:247] Caches are synced for job 
	I0813 00:23:57.967122       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kubelet-serving 
	I0813 00:23:57.971271       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kubelet-client 
	I0813 00:23:57.972709       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-legacy-unknown 
	I0813 00:23:57.974698       1 shared_informer.go:247] Caches are synced for TTL after finished 
	I0813 00:23:57.982328       1 shared_informer.go:247] Caches are synced for certificate-csrapproving 
	I0813 00:23:57.982525       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kube-apiserver-client 
	I0813 00:23:57.982606       1 shared_informer.go:247] Caches are synced for stateful set 
	I0813 00:23:57.984923       1 shared_informer.go:247] Caches are synced for bootstrap_signer 
	I0813 00:23:57.989195       1 shared_informer.go:247] Caches are synced for persistent volume 
	I0813 00:23:57.993635       1 shared_informer.go:247] Caches are synced for crt configmap 
	I0813 00:23:58.072945       1 shared_informer.go:247] Caches are synced for disruption 
	I0813 00:23:58.073409       1 disruption.go:371] Sending events to api server.
	I0813 00:23:58.151201       1 shared_informer.go:247] Caches are synced for resource quota 
	I0813 00:23:58.160353       1 shared_informer.go:247] Caches are synced for resource quota 
	I0813 00:23:58.659003       1 shared_informer.go:247] Caches are synced for garbage collector 
	I0813 00:23:58.659723       1 garbagecollector.go:151] Garbage collector: all resource monitors have synced. Proceeding to collect garbage
	I0813 00:23:58.662160       1 shared_informer.go:247] Caches are synced for garbage collector 
	
	* 
	* ==> kube-proxy [6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b] <==
	* I0813 00:23:26.907583       1 node.go:172] Successfully retrieved node IP: 192.168.127.196
	I0813 00:23:26.907779       1 server_others.go:140] Detected node IP 192.168.127.196
	W0813 00:23:26.907836       1 server_others.go:598] Unknown proxy mode "", assuming iptables proxy
	W0813 00:23:27.211577       1 server_others.go:197] No iptables support for IPv6: exit status 3
	I0813 00:23:27.211713       1 server_others.go:208] kube-proxy running in single-stack IPv4 mode
	I0813 00:23:27.211745       1 server_others.go:212] Using iptables Proxier.
	I0813 00:23:27.212315       1 server.go:643] Version: v1.21.3
	I0813 00:23:27.215676       1 config.go:315] Starting service config controller
	I0813 00:23:27.215816       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0813 00:23:27.215853       1 config.go:224] Starting endpoint slice config controller
	I0813 00:23:27.215859       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	W0813 00:23:27.234006       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	W0813 00:23:27.237301       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	I0813 00:23:27.316802       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	I0813 00:23:27.316986       1 shared_informer.go:247] Caches are synced for service config 
	
	* 
	* ==> kube-proxy [c11b8a977685bc2516a3a180b7d7e5a078649d5b9c68db67af64bdbf0438193c] <==
	* I0813 00:22:08.526654       1 node.go:172] Successfully retrieved node IP: 192.168.127.196
	I0813 00:22:08.527015       1 server_others.go:140] Detected node IP 192.168.127.196
	W0813 00:22:08.527394       1 server_others.go:598] Unknown proxy mode "", assuming iptables proxy
	W0813 00:22:08.644892       1 server_others.go:197] No iptables support for IPv6: exit status 3
	I0813 00:22:08.644915       1 server_others.go:208] kube-proxy running in single-stack IPv4 mode
	I0813 00:22:08.644934       1 server_others.go:212] Using iptables Proxier.
	I0813 00:22:08.647847       1 server.go:643] Version: v1.21.3
	I0813 00:22:08.651170       1 config.go:315] Starting service config controller
	I0813 00:22:08.651751       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0813 00:22:08.657258       1 config.go:224] Starting endpoint slice config controller
	I0813 00:22:08.657749       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	W0813 00:22:08.659509       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	W0813 00:22:08.695660       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	I0813 00:22:08.752469       1 shared_informer.go:247] Caches are synced for service config 
	I0813 00:22:08.759025       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	
	* 
	* ==> kube-scheduler [3afc8c09f828616463f8d4246cdb7a602c45569e04de078f3b507b5df49993e8] <==
	* E0813 00:21:42.747204       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.747540       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0813 00:21:42.748267       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.748530       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0813 00:21:42.749032       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0813 00:21:42.749616       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.747557       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0813 00:21:42.750598       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.CSIStorageCapacity: failed to list *v1beta1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.751707       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0813 00:21:43.586759       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:43.586851       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0813 00:21:43.611677       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0813 00:21:43.619757       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0813 00:21:43.758571       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.CSIStorageCapacity: failed to list *v1beta1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:43.808629       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0813 00:21:43.858463       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0813 00:21:43.874015       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0813 00:21:43.903878       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0813 00:21:43.932330       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0813 00:21:44.053954       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0813 00:21:44.184579       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:44.276152       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:44.276696       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0813 00:21:45.625656       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	I0813 00:21:51.218463       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
	
	* 
	* ==> kube-scheduler [64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0] <==
	* I0813 00:23:17.339713       1 serving.go:347] Generated self-signed cert in-memory
	W0813 00:23:24.103838       1 requestheader_controller.go:193] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0813 00:23:24.103977       1 authentication.go:337] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0813 00:23:24.103989       1 authentication.go:338] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0813 00:23:24.103997       1 authentication.go:339] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0813 00:23:24.177616       1 secure_serving.go:197] Serving securely on 127.0.0.1:10259
	I0813 00:23:24.178246       1 tlsconfig.go:240] Starting DynamicServingCertificateController
	I0813 00:23:24.179347       1 configmap_cafile_content.go:202] Starting client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0813 00:23:24.196646       1 shared_informer.go:240] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0813 00:23:24.304977       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
	
	* 
	* ==> kubelet <==
	* -- Logs begin at Fri 2021-08-13 00:20:51 UTC, end at Fri 2021-08-13 00:24:11 UTC. --
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907271    5078 server.go:660] "--cgroups-per-qos enabled, but --cgroup-root was not specified.  defaulting to /"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907671    5078 container_manager_linux.go:278] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907807    5078 container_manager_linux.go:283] "Creating Container Manager object based on Node Config" nodeConfig={RuntimeCgroupsName: SystemCgroupsName: KubeletCgroupsName: ContainerRuntime:remote CgroupsPerQOS:true CgroupRoot:/ CgroupDriver:cgroupfs KubeletRootDir:/var/lib/kubelet ProtectKernelDefaults:false NodeAllocatableConfig:{KubeReservedCgroupName: SystemReservedCgroupName: ReservedSystemCPUs: EnforceNodeAllocatable:map[pods:{}] KubeReserved:map[] SystemReserved:map[] HardEvictionThresholds:[]} QOSReserved:map[] ExperimentalCPUManagerPolicy:none ExperimentalTopologyManagerScope:container ExperimentalCPUManagerReconcilePeriod:10s ExperimentalMemoryManagerPolicy:None ExperimentalMemoryManagerReservedMemory:[] ExperimentalPodPidsLimit:-1 EnforceCPULimits:true CPUCFSQuotaPeriod:100ms ExperimentalTopologyManagerPolicy:none}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907872    5078 topology_manager.go:120] "Creating topology manager with policy per scope" topologyPolicyName="none" topologyScopeName="container"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907885    5078 container_manager_linux.go:314] "Initializing Topology Manager" policy="none" scope="container"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907893    5078 container_manager_linux.go:319] "Creating device plugin manager" devicePluginEnabled=true
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908017    5078 remote_runtime.go:62] parsed scheme: ""
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908027    5078 remote_runtime.go:62] scheme "" not registered, fallback to default scheme
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908151    5078 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{/run/containerd/containerd.sock  <nil> 0 <nil>}] <nil> <nil>}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908169    5078 clientconn.go:948] ClientConn switching balancer to "pick_first"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908324    5078 remote_image.go:50] parsed scheme: ""
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908333    5078 remote_image.go:50] scheme "" not registered, fallback to default scheme
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908348    5078 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{/run/containerd/containerd.sock  <nil> 0 <nil>}] <nil> <nil>}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908359    5078 clientconn.go:948] ClientConn switching balancer to "pick_first"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908467    5078 kubelet.go:404] "Attempting to sync node with API server"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908488    5078 kubelet.go:272] "Adding static pod path" path="/etc/kubernetes/manifests"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908521    5078 kubelet.go:283] "Adding apiserver pod source"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908555    5078 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.909189    5078 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.913695    5078 kuberuntime_manager.go:222] "Container runtime initialized" containerRuntime="containerd" version="v1.4.9" apiVersion="v1alpha2"
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]: E0813 00:24:07.237758    5078 aws_credentials.go:77] while getting AWS credentials NoCredentialProviders: no valid providers in chain. Deprecated.
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]:         For verbose messaging see aws.Config.CredentialsChainVerboseErrors
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:07.244131    5078 server.go:1190] "Started kubelet"
	Aug 13 00:24:07 pause-20210813001951-679351 systemd[1]: kubelet.service: Succeeded.
	Aug 13 00:24:07 pause-20210813001951-679351 systemd[1]: Stopped kubelet: The Kubernetes Node Agent.
	
	* 
	* ==> storage-provisioner [7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9] <==
	* I0813 00:23:30.839269       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0813 00:23:30.879440       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0813 00:23:30.880848       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0813 00:23:30.925896       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0813 00:23:30.926952       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99!
	I0813 00:23:30.936151       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"44c00147-d6a4-4a55-bec0-85ce7cb56602", APIVersion:"v1", ResourceVersion:"544", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99 became leader
	I0813 00:23:31.046717       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99!
	

-- /stdout --
helpers_test.go:255: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p pause-20210813001951-679351 -n pause-20210813001951-679351
helpers_test.go:255: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p pause-20210813001951-679351 -n pause-20210813001951-679351: exit status 2 (250.36369ms)

-- stdout --
	Running

-- /stdout --
helpers_test.go:255: status error: exit status 2 (may be ok)
helpers_test.go:262: (dbg) Run:  kubectl --context pause-20210813001951-679351 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:271: non-running pods: 
helpers_test.go:273: ======> post-mortem[TestPause/serial/PauseAgain]: describe non-running pods <======
helpers_test.go:276: (dbg) Run:  kubectl --context pause-20210813001951-679351 describe pod 
helpers_test.go:276: (dbg) Non-zero exit: kubectl --context pause-20210813001951-679351 describe pod : exit status 1 (48.832134ms)

** stderr ** 
	error: resource name may not be empty

** /stderr **
helpers_test.go:278: kubectl --context pause-20210813001951-679351 describe pod : exit status 1
helpers_test.go:223: -----------------------post-mortem--------------------------------
helpers_test.go:240: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p pause-20210813001951-679351 -n pause-20210813001951-679351
helpers_test.go:240: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p pause-20210813001951-679351 -n pause-20210813001951-679351: exit status 2 (258.652091ms)

-- stdout --
	Running

-- /stdout --
helpers_test.go:240: status error: exit status 2 (may be ok)
helpers_test.go:245: <<< TestPause/serial/PauseAgain FAILED: start of post-mortem logs <<<
helpers_test.go:246: ======>  post-mortem[TestPause/serial/PauseAgain]: minikube logs <======
helpers_test.go:248: (dbg) Run:  out/minikube-linux-amd64 -p pause-20210813001951-679351 logs -n 25
helpers_test.go:248: (dbg) Done: out/minikube-linux-amd64 -p pause-20210813001951-679351 logs -n 25: (1.601554483s)
helpers_test.go:253: TestPause/serial/PauseAgain logs: 
-- stdout --
	* 
	* ==> Audit <==
	* |---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	| Command |                   Args                   |                 Profile                  |  User   | Version |          Start Time           |           End Time            |
	|---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	| stop    | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:35 UTC | Fri, 13 Aug 2021 00:19:42 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	|         | --schedule 5s                            |                                          |         |         |                               |                               |
	| delete  | -p                                       | scheduled-stop-20210813001820-679351     | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:50 UTC | Fri, 13 Aug 2021 00:19:51 UTC |
	|         | scheduled-stop-20210813001820-679351     |                                          |         |         |                               |                               |
	| start   | -p                                       | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:21:03 UTC |
	|         | force-systemd-env-20210813001951-679351  |                                          |         |         |                               |                               |
	|         | --memory=2048 --alsologtostderr          |                                          |         |         |                               |                               |
	|         | -v=5 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | force-systemd-env-20210813001951-679351  | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:03 UTC | Fri, 13 Aug 2021 00:21:04 UTC |
	|         | ssh cat /etc/containerd/config.toml      |                                          |         |         |                               |                               |
	| delete  | -p                                       | force-systemd-env-20210813001951-679351  | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:04 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	|         | force-systemd-env-20210813001951-679351  |                                          |         |         |                               |                               |
	| delete  | -p                                       | kubenet-20210813002105-679351            | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:05 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	|         | kubenet-20210813002105-679351            |                                          |         |         |                               |                               |
	| delete  | -p false-20210813002105-679351           | false-20210813002105-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:05 UTC | Fri, 13 Aug 2021 00:21:05 UTC |
	| start   | -p                                       | offline-containerd-20210813001951-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:22:10 UTC |
	|         | offline-containerd-20210813001951-679351 |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=1 --memory=2048     |                                          |         |         |                               |                               |
	|         | --wait=true --driver=kvm2                |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| delete  | -p                                       | offline-containerd-20210813001951-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:10 UTC | Fri, 13 Aug 2021 00:22:11 UTC |
	|         | offline-containerd-20210813001951-679351 |                                          |         |         |                               |                               |
	| start   | -p                                       | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:21:08 UTC | Fri, 13 Aug 2021 00:22:38 UTC |
	|         | force-systemd-flag-20210813002108-679351 |                                          |         |         |                               |                               |
	|         | --memory=2048 --force-systemd            |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=5 --driver=kvm2     |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | force-systemd-flag-20210813002108-679351 | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:38 UTC | Fri, 13 Aug 2021 00:22:38 UTC |
	|         | ssh cat /etc/containerd/config.toml      |                                          |         |         |                               |                               |
	| delete  | -p                                       | force-systemd-flag-20210813002108-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:38 UTC | Fri, 13 Aug 2021 00:22:40 UTC |
	|         | force-systemd-flag-20210813002108-679351 |                                          |         |         |                               |                               |
	| start   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:19:51 UTC | Fri, 13 Aug 2021 00:22:49 UTC |
	|         | --memory=2048                            |                                          |         |         |                               |                               |
	|         | --install-addons=false                   |                                          |         |         |                               |                               |
	|         | --wait=all --driver=kvm2                 |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| start   | -p                                       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:11 UTC | Fri, 13 Aug 2021 00:23:29 UTC |
	|         | cert-options-20210813002211-679351       |                                          |         |         |                               |                               |
	|         | --memory=2048                            |                                          |         |         |                               |                               |
	|         | --apiserver-ips=127.0.0.1                |                                          |         |         |                               |                               |
	|         | --apiserver-ips=192.168.15.15            |                                          |         |         |                               |                               |
	|         | --apiserver-names=localhost              |                                          |         |         |                               |                               |
	|         | --apiserver-names=www.google.com         |                                          |         |         |                               |                               |
	|         | --apiserver-port=8555                    |                                          |         |         |                               |                               |
	|         | --driver=kvm2                            |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| -p      | cert-options-20210813002211-679351       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:29 UTC | Fri, 13 Aug 2021 00:23:29 UTC |
	|         | ssh openssl x509 -text -noout -in        |                                          |         |         |                               |                               |
	|         | /var/lib/minikube/certs/apiserver.crt    |                                          |         |         |                               |                               |
	| delete  | -p                                       | cert-options-20210813002211-679351       | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:30 UTC | Fri, 13 Aug 2021 00:23:31 UTC |
	|         | cert-options-20210813002211-679351       |                                          |         |         |                               |                               |
	| start   | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:29 UTC | Fri, 13 Aug 2021 00:23:40 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	|         | --memory=2200 --alsologtostderr          |                                          |         |         |                               |                               |
	|         | -v=1 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| logs    | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:40 UTC | Fri, 13 Aug 2021 00:23:42 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	| delete  | -p                                       | stopped-upgrade-20210813001951-679351    | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:42 UTC | Fri, 13 Aug 2021 00:23:43 UTC |
	|         | stopped-upgrade-20210813001951-679351    |                                          |         |         |                               |                               |
	| start   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:49 UTC | Fri, 13 Aug 2021 00:23:59 UTC |
	|         | --alsologtostderr                        |                                          |         |         |                               |                               |
	|         | -v=1 --driver=kvm2                       |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| pause   | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:23:59 UTC | Fri, 13 Aug 2021 00:24:00 UTC |
	|         | --alsologtostderr -v=5                   |                                          |         |         |                               |                               |
	| unpause | -p pause-20210813001951-679351           | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:24:00 UTC | Fri, 13 Aug 2021 00:24:03 UTC |
	|         | --alsologtostderr -v=5                   |                                          |         |         |                               |                               |
	| start   | -p                                       | kubernetes-upgrade-20210813002240-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:22:40 UTC | Fri, 13 Aug 2021 00:24:04 UTC |
	|         | kubernetes-upgrade-20210813002240-679351 |                                          |         |         |                               |                               |
	|         | --memory=2200                            |                                          |         |         |                               |                               |
	|         | --kubernetes-version=v1.14.0             |                                          |         |         |                               |                               |
	|         | --alsologtostderr -v=1 --driver=kvm2     |                                          |         |         |                               |                               |
	|         | --container-runtime=containerd           |                                          |         |         |                               |                               |
	| stop    | -p                                       | kubernetes-upgrade-20210813002240-679351 | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:24:04 UTC | Fri, 13 Aug 2021 00:24:10 UTC |
	|         | kubernetes-upgrade-20210813002240-679351 |                                          |         |         |                               |                               |
	| -p      | pause-20210813001951-679351              | pause-20210813001951-679351              | jenkins | v1.22.0 | Fri, 13 Aug 2021 00:24:10 UTC | Fri, 13 Aug 2021 00:24:11 UTC |
	|         | logs -n 25                               |                                          |         |         |                               |                               |
	|---------|------------------------------------------|------------------------------------------|---------|---------|-------------------------------|-------------------------------|
	
	* 
	* ==> Last Start <==
	* Log file created at: 2021/08/13 00:24:11
	Running on machine: debian-jenkins-agent-10
	Binary: Built with gc go1.16.7 for linux/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0813 00:24:11.119018  717315 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:24:11.119125  717315 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:24:11.119134  717315 out.go:311] Setting ErrFile to fd 2...
	I0813 00:24:11.119138  717315 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:24:11.119251  717315 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:24:11.119487  717315 out.go:305] Setting JSON to false
	I0813 00:24:11.155154  717315 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":14814,"bootTime":1628799437,"procs":204,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0813 00:24:11.155270  717315 start.go:121] virtualization: kvm guest
	I0813 00:24:11.158258  717315 out.go:177] * [kubernetes-upgrade-20210813002240-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	I0813 00:24:11.159950  717315 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:24:11.158423  717315 notify.go:169] Checking for updates...
	I0813 00:24:11.161434  717315 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0813 00:24:11.162942  717315 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:24:11.164427  717315 out.go:177]   - MINIKUBE_LOCATION=12230
	I0813 00:24:11.165289  717315 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:24:11.165347  717315 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:24:11.177008  717315 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:39705
	I0813 00:24:11.177479  717315 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:24:11.178172  717315 main.go:130] libmachine: Using API Version  1
	I0813 00:24:11.178199  717315 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:24:11.185428  717315 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:24:11.185769  717315 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .DriverName
	I0813 00:24:11.186177  717315 driver.go:335] Setting default libvirt URI to qemu:///system
	I0813 00:24:11.186609  717315 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:24:11.186656  717315 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:24:11.198110  717315 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:36983
	I0813 00:24:11.198547  717315 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:24:11.199530  717315 main.go:130] libmachine: Using API Version  1
	I0813 00:24:11.199558  717315 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:24:11.199935  717315 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:24:11.200112  717315 main.go:130] libmachine: (kubernetes-upgrade-20210813002240-679351) Calling .DriverName
	I0813 00:24:11.236730  717315 out.go:177] * Using the kvm2 driver based on existing profile
	I0813 00:24:11.236757  717315 start.go:278] selected driver: kvm2
	I0813 00:24:11.236763  717315 start.go:751] validating driver "kvm2" against &{Name:kubernetes-upgrade-20210813002240-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2200 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.14.0 ClusterName:kubernetes-upgrade-20210813002240-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.50.136 Port:8443 KubernetesVersion:v1.14.0 ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0813 00:24:11.236883  717315 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0813 00:24:11.237960  717315 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0813 00:24:11.238119  717315 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0813 00:24:11.256957  717315 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0813 00:24:11.257394  717315 cni.go:93] Creating CNI manager for ""
	I0813 00:24:11.257413  717315 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0813 00:24:11.257425  717315 start_flags.go:277] config:
	{Name:kubernetes-upgrade-20210813002240-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2200 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.22.0-rc.0 ClusterName:kubernetes-upgrade-20210813002240-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.50.136 Port:8443 KubernetesVersion:v1.14.0 ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0813 00:24:11.257652  717315 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	
	* 
	* ==> container status <==
	* CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID
	a4a4ec1132e56       bc2bb319a7038       28 seconds ago      Running             kube-controller-manager   2                   a8ae77b235803
	7899222a89a71       6e38f40d628db       42 seconds ago      Running             storage-provisioner       0                   4a6e63928b11f
	d30696c5405d8       296a6d5035e2d       54 seconds ago      Running             coredns                   1                   f5e6ffa407fcd
	6204b21ab9c2c       adb2816ea823a       56 seconds ago      Running             kube-proxy                1                   46f36163c53e1
	705c524b7bd2d       0369cf4303ffd       56 seconds ago      Running             etcd                      1                   accc3895e9638
	64c935095bced       6be0dc1302e30       57 seconds ago      Running             kube-scheduler            1                   2c9091a6bd8d7
	85bd885bbae1e       3d174f00aa39e       57 seconds ago      Running             kube-apiserver            1                   bb4e7930b2ada
	9cb0b80b9734a       bc2bb319a7038       57 seconds ago      Exited              kube-controller-manager   1                   a8ae77b235803
	efb6b8992aa82       296a6d5035e2d       2 minutes ago       Exited              coredns                   0                   36cd319da9139
	c11b8a977685b       adb2816ea823a       2 minutes ago       Exited              kube-proxy                0                   7d0f0371d8768
	2efebee19d7a6       0369cf4303ffd       2 minutes ago       Exited              etcd                      0                   54509c8ec15d2
	3874ae5baf2d8       3d174f00aa39e       2 minutes ago       Exited              kube-apiserver            0                   c77d72f80a55b
	3afc8c09f8286       6be0dc1302e30       2 minutes ago       Exited              kube-scheduler            0                   1bd10dcb9a7ea
	
	* 
	* ==> containerd <==
	* -- Logs begin at Fri 2021-08-13 00:20:51 UTC, end at Fri 2021-08-13 00:24:13 UTC. --
	Aug 13 00:23:18 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:18.421885649Z" level=info msg="StartContainer for \"6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b\" returns successfully"
	Aug 13 00:23:18 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:18.634895282Z" level=info msg="StartContainer for \"d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f\" returns successfully"
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.651258400Z" level=info msg="Finish piping stderr of container \"9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe\""
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.652374051Z" level=info msg="Finish piping stdout of container \"9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe\""
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.654645594Z" level=info msg="TaskExit event &TaskExit{ContainerID:9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe,ID:9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe,Pid:4197,ExitStatus:255,ExitedAt:2021-08-13 00:23:27.653997105 +0000 UTC,XXX_unrecognized:[],}"
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.765885334Z" level=info msg="shim disconnected" id=9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe
	Aug 13 00:23:27 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:27.766396530Z" level=error msg="copy shim log" error="read /proc/self/fd/58: file already closed"
	Aug 13 00:23:28 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:28.010584031Z" level=info msg="RemoveContainer for \"112c2918d72a90f4b0bbe9d6e1b3134149bf89a77f20de44d4059a1ad6edeff4\""
	Aug 13 00:23:28 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:28.021541263Z" level=info msg="RemoveContainer for \"112c2918d72a90f4b0bbe9d6e1b3134149bf89a77f20de44d4059a1ad6edeff4\" returns successfully"
	Aug 13 00:23:29 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:29.821371568Z" level=info msg="RunPodsandbox for &PodSandboxMetadata{Name:storage-provisioner,Uid:b781b362-9644-4c96-a463-4cb61bc5ab58,Namespace:kube-system,Attempt:0,}"
	Aug 13 00:23:29 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:29.875382976Z" level=info msg="starting signal loop" namespace=k8s.io path=/run/containerd/io.containerd.runtime.v2.task/k8s.io/4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565 pid=4636
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.445012017Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:storage-provisioner,Uid:b781b362-9644-4c96-a463-4cb61bc5ab58,Namespace:kube-system,Attempt:0,} returns sandbox id \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.455717251Z" level=info msg="CreateContainer within sandbox \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:0,}"
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.529886160Z" level=info msg="CreateContainer within sandbox \"4a6e63928b11fedafa692c25459f3246b10afd3d4379e021576f2665ed408565\" for &ContainerMetadata{Name:storage-provisioner,Attempt:0,} returns container id \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.531938735Z" level=info msg="StartContainer for \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\""
	Aug 13 00:23:30 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:30.785895579Z" level=info msg="StartContainer for \"7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9\" returns successfully"
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.128731300Z" level=info msg="CreateContainer within sandbox \"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264\" for container &ContainerMetadata{Name:kube-controller-manager,Attempt:2,}"
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.216530954Z" level=info msg="CreateContainer within sandbox \"a8ae77b235803b4e19e0eb0a6e8e4d70a30100102282506be869694a0b95d264\" for &ContainerMetadata{Name:kube-controller-manager,Attempt:2,} returns container id \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\""
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.221615537Z" level=info msg="StartContainer for \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\""
	Aug 13 00:23:45 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:45.473360047Z" level=info msg="StartContainer for \"a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d\" returns successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024224370Z" level=info msg="StopPodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\""
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024332361Z" level=info msg="TearDown network for sandbox \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.024344869Z" level=info msg="StopPodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" returns successfully"
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.026182269Z" level=info msg="RemovePodSandbox for \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\""
	Aug 13 00:23:54 pause-20210813001951-679351 containerd[3537]: time="2021-08-13T00:23:54.033764742Z" level=info msg="RemovePodSandbox \"010dd466bc9609853f86415ca26b64dbc3754ff0bcb704d6a8abdf03248fe11a\" returns successfully"
	
	* 
	* ==> coredns [d30696c5405d8e3fbc2bfe7ef7e391b98c301d6056d08f1d32a9614f101edc6f] <==
	* [INFO] plugin/ready: Still waiting on: "kubernetes"
	.:53
	[INFO] plugin/reload: Running configuration MD5 = 21fa5447a9370c672668c17fadc8028a
	CoreDNS-1.8.0
	linux/amd64, go1.15.3, 054c9ae
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	* 
	* ==> coredns [efb6b8992aa826974c98985c6dbeb065b7a93d6ceeacacae98b06ec18bbfd5bb] <==
	* I0813 00:22:38.694893       1 trace.go:205] Trace[2019727887]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.692) (total time: 30002ms):
	Trace[2019727887]: [30.002199421s] [30.002199421s] END
	E0813 00:22:38.695325       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0813 00:22:38.695485       1 trace.go:205] Trace[911902081]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.694) (total time: 30001ms):
	Trace[911902081]: [30.001141967s] [30.001141967s] END
	E0813 00:22:38.695755       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0813 00:22:38.695034       1 trace.go:205] Trace[1427131847]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156 (13-Aug-2021 00:22:08.694) (total time: 30000ms):
	Trace[1427131847]: [30.000422913s] [30.000422913s] END
	E0813 00:22:38.696466       1 reflector.go:127] pkg/mod/k8s.io/client-go@v0.19.2/tools/cache/reflector.go:156: Failed to watch *v1.Endpoints: failed to list *v1.Endpoints: Get "https://10.96.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	.:53
	[INFO] plugin/reload: Running configuration MD5 = db32ca3650231d74073ff4cf814959a7
	CoreDNS-1.8.0
	linux/amd64, go1.15.3, 054c9ae
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] Reloading
	[INFO] plugin/health: Going into lameduck mode for 5s
	[INFO] plugin/reload: Running configuration MD5 = 21fa5447a9370c672668c17fadc8028a
	[INFO] Reloading complete
	
	* 
	* ==> describe nodes <==
	* Name:               pause-20210813001951-679351
	Roles:              control-plane,master
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=pause-20210813001951-679351
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=dc1c3ca26e9449ce488a773126b8450402c94a19
	                    minikube.k8s.io/name=pause-20210813001951-679351
	                    minikube.k8s.io/updated_at=2021_08_13T00_21_48_0700
	                    minikube.k8s.io/version=v1.22.0
	                    node-role.kubernetes.io/control-plane=
	                    node-role.kubernetes.io/master=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: /run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Fri, 13 Aug 2021 00:21:44 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  pause-20210813001951-679351
	  AcquireTime:     <unset>
	  RenewTime:       Fri, 13 Aug 2021 00:23:57 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:38 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Fri, 13 Aug 2021 00:21:57 +0000   Fri, 13 Aug 2021 00:21:57 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.127.196
	  Hostname:    pause-20210813001951-679351
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17784752Ki
	  hugepages-2Mi:      0
	  memory:             2033024Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17784752Ki
	  hugepages-2Mi:      0
	  memory:             2033024Ki
	  pods:               110
	System Info:
	  Machine ID:                 9109403b29744bfb99029b25cc4f9da7
	  System UUID:                9109403b-2974-4bfb-9902-9b25cc4f9da7
	  Boot ID:                    0d1f634d-bf20-456d-8420-8e644eba3e38
	  Kernel Version:             4.19.182
	  OS Image:                   Buildroot 2020.02.12
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  containerd://1.4.9
	  Kubelet Version:            v1.21.3
	  Kube-Proxy Version:         v1.21.3
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (7 in total)
	  Namespace                   Name                                                   CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                                   ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-558bd4d5db-xjmwl                               100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     2m8s
	  kube-system                 etcd-pause-20210813001951-679351                       100m (5%)     0 (0%)      100Mi (5%)       0 (0%)         2m28s
	  kube-system                 kube-apiserver-pause-20210813001951-679351             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m19s
	  kube-system                 kube-controller-manager-pause-20210813001951-679351    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m19s
	  kube-system                 kube-proxy-2mkpr                                       0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m8s
	  kube-system                 kube-scheduler-pause-20210813001951-679351             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m19s
	  kube-system                 storage-provisioner                                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         44s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  0 (0%)
	  memory             170Mi (8%)  170Mi (8%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From        Message
	  ----    ------                   ----                   ----        -------
	  Normal  NodeHasSufficientMemory  2m41s (x6 over 2m41s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m41s (x6 over 2m41s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m41s (x5 over 2m41s)  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientPID
	  Normal  Starting                 2m20s                  kubelet     Starting kubelet.
	  Normal  NodeHasSufficientMemory  2m19s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m19s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m19s                  kubelet     Node pause-20210813001951-679351 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  2m19s                  kubelet     Updated Node Allocatable limit across pods
	  Normal  NodeReady                2m16s                  kubelet     Node pause-20210813001951-679351 status is now: NodeReady
	  Normal  Starting                 2m5s                   kube-proxy  Starting kube-proxy.
	  Normal  Starting                 46s                    kube-proxy  Starting kube-proxy.
	
	* 
	* ==> dmesg <==
	*               on the kernel command line
	[  +0.000130] platform regulatory.0: Direct firmware load for regulatory.db failed with error -2
	[  +4.671269] systemd-fstab-generator[1160]: Ignoring "noauto" for root device
	[  +0.045892] systemd[1]: system-getty.slice: unit configures an IP firewall, but the local system does not support BPF/cgroup firewalling.
	[  +0.000003] systemd[1]: (This warning is only shown for the first unit using IP firewalling.)
	[  +1.192244] SELinux: unrecognized netlink message: protocol=0 nlmsg_type=106 sclass=netlink_route_socket pid=1717 comm=systemd-network
	[  +2.439853] NFSD: the nfsdcld client tracking upcall will be removed in 3.10. Please transition to using nfsdcltrack.
	[  +2.154553] vboxguest: loading out-of-tree module taints kernel.
	[  +0.006032] vboxguest: PCI device not found, probably running on physical hardware.
	[Aug13 00:21] systemd-fstab-generator[2094]: Ignoring "noauto" for root device
	[  +0.270933] systemd-fstab-generator[2126]: Ignoring "noauto" for root device
	[  +0.157086] systemd-fstab-generator[2141]: Ignoring "noauto" for root device
	[  +0.205044] systemd-fstab-generator[2172]: Ignoring "noauto" for root device
	[  +8.129227] systemd-fstab-generator[2377]: Ignoring "noauto" for root device
	[ +21.150444] systemd-fstab-generator[2810]: Ignoring "noauto" for root device
	[Aug13 00:22] kauditd_printk_skb: 38 callbacks suppressed
	[ +41.503227] kauditd_printk_skb: 65 callbacks suppressed
	[  +6.452067] NFSD: Unable to end grace period: -110
	[Aug13 00:23] systemd-fstab-generator[3489]: Ignoring "noauto" for root device
	[  +0.238108] systemd-fstab-generator[3502]: Ignoring "noauto" for root device
	[  +0.253277] systemd-fstab-generator[3527]: Ignoring "noauto" for root device
	[ +17.104028] kauditd_printk_skb: 29 callbacks suppressed
	[ +32.355407] systemd-fstab-generator[4902]: Ignoring "noauto" for root device
	[Aug13 00:24] systemd-fstab-generator[5070]: Ignoring "noauto" for root device
	[  +3.299751] systemd-fstab-generator[5099]: Ignoring "noauto" for root device
	
	* 
	* ==> etcd [2efebee19d7a6bd77fd1333dab2cc543c575e8c6babdae865b90a1cf0fa48744] <==
	* 2021-08-13 00:22:07.230197 W | etcdserver: read-only range request "key:\"/registry/endpointslices/kube-system/kube-dns-5fjzs\" " with result "range_response_count:1 size:1013" took too long (671.993662ms) to execute
	2021-08-13 00:22:07.230468 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/kube-system/coredns\" " with result "range_response_count:1 size:217" took too long (741.585441ms) to execute
	2021-08-13 00:22:14.207544 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:24.208238 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:34.210334 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:22:45.207896 W | etcdserver/api/etcdhttp: /health error; QGET failed etcdserver: request timed out (status code 503)
	2021-08-13 00:22:45.919806 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "error:context deadline exceeded" took too long (2.000045576s) to execute
	2021-08-13 00:22:47.101521 W | wal: sync duration of 4.051774181s, expected less than 1s
	2021-08-13 00:22:47.103383 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4746" took too long (3.677716425s) to execute
	2021-08-13 00:22:47.104500 W | etcdserver: read-only range request "key:\"/registry/namespaces/kube-system\" " with result "range_response_count:1 size:351" took too long (2.60570306s) to execute
	2021-08-13 00:22:47.104825 W | etcdserver: read-only range request "key:\"/registry/namespaces/default\" " with result "range_response_count:1 size:341" took too long (894.799494ms) to execute
	2021-08-13 00:22:47.105989 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4746" took too long (1.568667767s) to execute
	2021-08-13 00:22:47.106790 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (1.176235696s) to execute
	2021-08-13 00:22:47.107462 W | etcdserver: read-only range request "key:\"/registry/flowschemas/exempt\" " with result "range_response_count:1 size:879" took too long (2.454270208s) to execute
	2021-08-13 00:22:47.950458 W | etcdserver: request "header:<ID:12242045188531646344 username:\"kube-apiserver-etcd-client\" auth_revision:1 > lease_grant:<ttl:3660-second id:29e47b3ce2b56f87>" with result "size:41" took too long (470.318775ms) to execute
	2021-08-13 00:22:47.951739 W | etcdserver: read-only range request "key:\"/registry/namespaces/kube-node-lease\" " with result "range_response_count:1 size:363" took too long (801.202969ms) to execute
	2021-08-13 00:22:47.955016 W | etcdserver: read-only range request "key:\"/registry/prioritylevelconfigurations/exempt\" " with result "range_response_count:1 size:371" took too long (804.019176ms) to execute
	2021-08-13 00:22:47.956311 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/coredns-558bd4d5db-xjmwl\" " with result "range_response_count:1 size:4568" took too long (530.617681ms) to execute
	2021-08-13 00:22:47.958454 W | etcdserver: read-only range request "key:\"/registry/priorityclasses/\" range_end:\"/registry/priorityclasses0\" count_only:true " with result "range_response_count:0 size:7" took too long (309.016563ms) to execute
	2021-08-13 00:22:48.801008 W | etcdserver: read-only range request "key:\"/registry/minions/pause-20210813001951-679351\" " with result "range_response_count:1 size:4776" took too long (768.144463ms) to execute
	2021-08-13 00:22:48.802711 W | etcdserver: read-only range request "key:\"/registry/controllers/\" range_end:\"/registry/controllers0\" count_only:true " with result "range_response_count:0 size:5" took too long (347.906261ms) to execute
	2021-08-13 00:22:49.019606 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/default/\" range_end:\"/registry/serviceaccounts/default0\" " with result "range_response_count:1 size:209" took too long (111.551006ms) to execute
	2021-08-13 00:22:49.020521 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (100.648686ms) to execute
	2021-08-13 00:22:54.217451 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:04.208424 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	
	* 
	* ==> etcd [705c524b7bd2d071e133ec74fb1f433cab624312145e8e0d2e4b19e7936be85b] <==
	* 2021-08-13 00:23:25.998767 W | etcdserver: read-only range request "key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" " with result "range_response_count:1 size:671" took too long (1.83123856s) to execute
	2021-08-13 00:23:25.999024 W | etcdserver: read-only range request "key:\"/registry/pods/kube-system/kube-apiserver-pause-20210813001951-679351\" " with result "range_response_count:1 size:6537" took too long (1.83134851s) to execute
	2021-08-13 00:23:25.999710 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/default/kubernetes\" " with result "range_response_count:1 size:423" took too long (1.836183835s) to execute
	2021-08-13 00:23:26.000315 W | etcdserver: read-only range request "key:\"/registry/serviceaccounts/kube-system/kube-proxy\" " with result "range_response_count:1 size:226" took too long (1.856850168s) to execute
	2021-08-13 00:23:26.003298 W | etcdserver: read-only range request "key:\"/registry/prioritylevelconfigurations/exempt\" " with result "range_response_count:1 size:371" took too long (1.862496869s) to execute
	2021-08-13 00:23:26.866787 W | etcdserver: read-only range request "key:\"/registry/clusterrolebindings/\" range_end:\"/registry/clusterrolebindings0\" " with result "range_response_count:50 size:37065" took too long (849.021882ms) to execute
	2021-08-13 00:23:26.868169 W | etcdserver: request "header:<ID:12242045188557790365 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" mod_revision:486 > success:<request_put:<key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" value_size:587 >> failure:<request_range:<key:\"/registry/leases/kube-node-lease/pause-20210813001951-679351\" > >>" with result "size:16" took too long (533.569848ms) to execute
	2021-08-13 00:23:26.875022 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (380.351682ms) to execute
	2021-08-13 00:23:26.888255 W | etcdserver: read-only range request "key:\"/registry/flowschemas/exempt\" " with result "range_response_count:1 size:879" took too long (866.035216ms) to execute
	2021-08-13 00:23:26.891763 W | etcdserver: read-only range request "key:\"/registry/minions/pause-20210813001951-679351\" " with result "range_response_count:1 size:4776" took too long (869.868576ms) to execute
	2021-08-13 00:23:26.892452 W | etcdserver: read-only range request "key:\"/registry/masterleases/\" range_end:\"/registry/masterleases0\" " with result "range_response_count:0 size:5" took too long (870.451036ms) to execute
	2021-08-13 00:23:26.892806 W | etcdserver: read-only range request "key:\"/registry/priorityclasses/system-cluster-critical\" " with result "range_response_count:1 size:476" took too long (870.772194ms) to execute
	2021-08-13 00:23:26.893782 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (353.420112ms) to execute
	2021-08-13 00:23:27.618510 W | etcdserver: read-only range request "key:\"/registry/events/kube-system/kube-proxy-2mkpr.169ab5d1b9f0872d\" " with result "range_response_count:1 size:826" took too long (312.125535ms) to execute
	2021-08-13 00:23:27.618804 W | etcdserver: read-only range request "key:\"/registry/clusterroles/system:controller:certificate-controller\" " with result "range_response_count:1 size:1142" took too long (324.375348ms) to execute
	2021-08-13 00:23:27.619230 W | etcdserver: read-only range request "key:\"/registry/health\" " with result "range_response_count:0 size:5" took too long (251.508486ms) to execute
	2021-08-13 00:23:35.057151 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:44.208010 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:23:54.207487 I | etcdserver/api/etcdhttp: /health OK (status code 200)
	2021-08-13 00:24:03.753011 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" " with result "range_response_count:1 size:1125" took too long (341.385574ms) to execute
	WARNING: 2021/08/13 00:24:07 grpc: Server.processUnaryRPC failed to write status: connection error: desc = "transport is closing"
	2021-08-13 00:24:09.122960 W | wal: sync duration of 3.322047764s, expected less than 1s
	2021-08-13 00:24:09.124505 W | etcdserver: read-only range request "key:\"/registry/namespaces/default\" " with result "range_response_count:1 size:341" took too long (1.247717726s) to execute
	2021-08-13 00:24:09.125866 W | etcdserver: read-only range request "key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" " with result "range_response_count:1 size:1125" took too long (1.315223316s) to execute
	2021-08-13 00:24:09.686753 W | etcdserver: request "header:<ID:12242045188557790983 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/masterleases/192.168.127.196\" mod_revision:580 > success:<request_put:<key:\"/registry/masterleases/192.168.127.196\" value_size:70 lease:3018673151703015173 >> failure:<request_range:<key:\"/registry/masterleases/192.168.127.196\" > >>" with result "size:16" took too long (348.846917ms) to execute
	
	* 
	* ==> kernel <==
	*  00:24:13 up 3 min,  0 users,  load average: 1.63, 0.95, 0.39
	Linux pause-20210813001951-679351 4.19.182 #1 SMP Fri Aug 6 09:11:32 UTC 2021 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2020.02.12"
	
	* 
	* ==> kube-apiserver [3874ae5baf2d856570fa5534f52778b464323afbd92eca54de5a983517dbbb65] <==
	* Trace[265158055]: [552.827013ms] [552.827013ms] END
	I0813 00:22:48.803967       1 trace.go:205] Trace[40150974]: "GuaranteedUpdate etcd3" type:*apps.Deployment (13-Aug-2021 00:22:48.035) (total time: 768ms):
	Trace[40150974]: ---"Transaction committed" 766ms (00:22:00.803)
	Trace[40150974]: [768.574416ms] [768.574416ms] END
	I0813 00:22:48.805449       1 trace.go:205] Trace[38850226]: "Update" url:/apis/apps/v1/namespaces/kube-system/deployments/coredns/status,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:deployment-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.034) (total time: 770ms):
	Trace[38850226]: ---"Object stored in database" 769ms (00:22:00.805)
	Trace[38850226]: [770.627081ms] [770.627081ms] END
	I0813 00:22:48.807911       1 trace.go:205] Trace[1153692894]: "GuaranteedUpdate etcd3" type:*discovery.EndpointSlice (13-Aug-2021 00:22:48.029) (total time: 777ms):
	Trace[1153692894]: ---"Transaction committed" 776ms (00:22:00.807)
	Trace[1153692894]: [777.942281ms] [777.942281ms] END
	I0813 00:22:48.811870       1 trace.go:205] Trace[2055050687]: "Get" url:/api/v1/nodes/pause-20210813001951-679351,user-agent:minikube-linux-amd64/v0.0.0 (linux/amd64) kubernetes/$Format,client:192.168.127.1,accept:application/json, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.030) (total time: 780ms):
	Trace[2055050687]: ---"About to write a response" 779ms (00:22:00.810)
	Trace[2055050687]: [780.446725ms] [780.446725ms] END
	I0813 00:22:48.815873       1 trace.go:205] Trace[1709682153]: "Update" url:/apis/discovery.k8s.io/v1/namespaces/kube-system/endpointslices/kube-dns-5fjzs,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:endpointslice-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.029) (total time: 786ms):
	Trace[1709682153]: ---"Object stored in database" 785ms (00:22:00.815)
	Trace[1709682153]: [786.028644ms] [786.028644ms] END
	I0813 00:22:48.812641       1 trace.go:205] Trace[1460083222]: "GuaranteedUpdate etcd3" type:*core.Endpoints (13-Aug-2021 00:22:48.029) (total time: 783ms):
	Trace[1460083222]: ---"Transaction committed" 782ms (00:22:00.812)
	Trace[1460083222]: [783.305236ms] [783.305236ms] END
	I0813 00:22:48.819982       1 trace.go:205] Trace[920420538]: "Update" url:/api/v1/namespaces/kube-system/endpoints/kube-dns,user-agent:kube-controller-manager/v1.21.3 (linux/amd64) kubernetes/ca643a4/system:serviceaccount:kube-system:endpoint-controller,client:192.168.127.196,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:22:48.029) (total time: 790ms):
	Trace[920420538]: ---"Object stored in database" 790ms (00:22:00.819)
	Trace[920420538]: [790.783982ms] [790.783982ms] END
	I0813 00:23:04.050873       1 client.go:360] parsed scheme: "passthrough"
	I0813 00:23:04.051445       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0813 00:23:04.051754       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	
	* 
	* ==> kube-apiserver [85bd885bbae1eb206d16fda70dde9e78726f0563495bc0c5f64cf2083b9a7bf7] <==
	* I0813 00:23:26.905285       1 storage_scheduling.go:148] all system priority classes are created successfully or already exist.
	I0813 00:23:29.331854       1 controller.go:611] quota admission added evaluator for: serviceaccounts
	I0813 00:23:29.366533       1 controller.go:611] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0813 00:23:29.384370       1 controller.go:611] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0813 00:23:29.403160       1 controller.go:611] quota admission added evaluator for: endpoints
	I0813 00:23:29.518332       1 controller.go:611] quota admission added evaluator for: events.events.k8s.io
	I0813 00:23:57.987189       1 controller.go:611] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0813 00:24:01.430291       1 client.go:360] parsed scheme: "passthrough"
	I0813 00:24:01.430491       1 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{https://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
	I0813 00:24:01.430526       1 clientconn.go:948] ClientConn switching balancer to "pick_first"
	E0813 00:24:07.259737       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"context canceled"}: context canceled
	E0813 00:24:07.259860       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"client disconnected"}: client disconnected
	E0813 00:24:07.263006       1 writers.go:117] apiserver was unable to write a JSON response: http: Handler timeout
	E0813 00:24:07.263568       1 wrap.go:54] timeout or abort while handling: GET "/apis/storage.k8s.io/v1/csinodes/pause-20210813001951-679351"
	E0813 00:24:07.265357       1 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"http: Handler timeout"}: http: Handler timeout
	E0813 00:24:07.268867       1 writers.go:130] apiserver was unable to write a fallback JSON response: http: Handler timeout
	I0813 00:24:09.127818       1 trace.go:205] Trace[986017853]: "Get" url:/api/v1/namespaces/default,user-agent:kube-apiserver/v1.21.3 (linux/amd64) kubernetes/ca643a4,client:127.0.0.1,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Aug-2021 00:24:07.874) (total time: 1253ms):
	Trace[986017853]: ---"About to write a response" 1253ms (00:24:00.127)
	Trace[986017853]: [1.253446276s] [1.253446276s] END
	I0813 00:24:09.129569       1 trace.go:205] Trace[1940889522]: "Get" url:/api/v1/namespaces/kube-system/endpoints/k8s.io-minikube-hostpath,user-agent:storage-provisioner/v0.0.0 (linux/amd64) kubernetes/$Format,client:192.168.127.196,accept:application/json, */*,protocol:HTTP/2.0 (13-Aug-2021 00:24:07.809) (total time: 1319ms):
	Trace[1940889522]: ---"About to write a response" 1319ms (00:24:00.129)
	Trace[1940889522]: [1.319757496s] [1.319757496s] END
	I0813 00:24:09.688489       1 trace.go:205] Trace[1106331583]: "GuaranteedUpdate etcd3" type:*v1.Endpoints (13-Aug-2021 00:24:09.160) (total time: 527ms):
	Trace[1106331583]: ---"Transaction committed" 523ms (00:24:00.688)
	Trace[1106331583]: [527.566261ms] [527.566261ms] END
	
	* 
	* ==> kube-controller-manager [9cb0b80b9734ad17c461045a38b0d0a6b8d8686ef46bf8dd99a00d62b8cc67fe] <==
	* 	/usr/local/go/src/bytes/buffer.go:204 +0xbe
	crypto/tls.(*Conn).readFromUntil(0xc000126e00, 0x500dda0, 0xc0008fa500, 0x5, 0xc0008fa500, 0x431)
		/usr/local/go/src/crypto/tls/conn.go:798 +0xf3
	crypto/tls.(*Conn).readRecordOrCCS(0xc000126e00, 0x0, 0x0, 0x0)
		/usr/local/go/src/crypto/tls/conn.go:605 +0x115
	crypto/tls.(*Conn).readRecord(...)
		/usr/local/go/src/crypto/tls/conn.go:573
	crypto/tls.(*Conn).Read(0xc000126e00, 0xc000be3000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
		/usr/local/go/src/crypto/tls/conn.go:1276 +0x165
	bufio.(*Reader).Read(0xc0001c8180, 0xc0002631b8, 0x9, 0x9, 0x9b69eb, 0xc001059c78, 0x4071a5)
		/usr/local/go/src/bufio/bufio.go:227 +0x222
	io.ReadAtLeast(0x5007a00, 0xc0001c8180, 0xc0002631b8, 0x9, 0x9, 0x9, 0xc000c0fec0, 0x4c5c995259c000, 0xc000c0fec0)
		/usr/local/go/src/io/io.go:328 +0x87
	io.ReadFull(...)
		/usr/local/go/src/io/io.go:347
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.readFrameHeader(0xc0002631b8, 0x9, 0x9, 0x5007a00, 0xc0001c8180, 0x0, 0x0, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:237 +0x89
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc000263180, 0xc00107d3b0, 0x0, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:492 +0xa5
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc001059fa8, 0x0, 0x0)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1819 +0xd8
	k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc000001200)
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1741 +0x6f
	created by k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Transport).newClientConn
		/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:705 +0x6c5
	
	* 
	* ==> kube-controller-manager [a4a4ec1132e56e93b77cf82333943e5daaab8efa48df65ced586f5a324cde37d] <==
	* I0813 00:23:57.943260       1 shared_informer.go:247] Caches are synced for ReplicaSet 
	I0813 00:23:57.947953       1 shared_informer.go:247] Caches are synced for deployment 
	I0813 00:23:57.955167       1 shared_informer.go:247] Caches are synced for namespace 
	I0813 00:23:57.955304       1 shared_informer.go:247] Caches are synced for TTL 
	I0813 00:23:57.957499       1 shared_informer.go:247] Caches are synced for cronjob 
	I0813 00:23:57.961016       1 shared_informer.go:240] Waiting for caches to sync for garbage collector
	I0813 00:23:57.962629       1 shared_informer.go:247] Caches are synced for HPA 
	I0813 00:23:57.965129       1 shared_informer.go:247] Caches are synced for job 
	I0813 00:23:57.967122       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kubelet-serving 
	I0813 00:23:57.971271       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kubelet-client 
	I0813 00:23:57.972709       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-legacy-unknown 
	I0813 00:23:57.974698       1 shared_informer.go:247] Caches are synced for TTL after finished 
	I0813 00:23:57.982328       1 shared_informer.go:247] Caches are synced for certificate-csrapproving 
	I0813 00:23:57.982525       1 shared_informer.go:247] Caches are synced for certificate-csrsigning-kube-apiserver-client 
	I0813 00:23:57.982606       1 shared_informer.go:247] Caches are synced for stateful set 
	I0813 00:23:57.984923       1 shared_informer.go:247] Caches are synced for bootstrap_signer 
	I0813 00:23:57.989195       1 shared_informer.go:247] Caches are synced for persistent volume 
	I0813 00:23:57.993635       1 shared_informer.go:247] Caches are synced for crt configmap 
	I0813 00:23:58.072945       1 shared_informer.go:247] Caches are synced for disruption 
	I0813 00:23:58.073409       1 disruption.go:371] Sending events to api server.
	I0813 00:23:58.151201       1 shared_informer.go:247] Caches are synced for resource quota 
	I0813 00:23:58.160353       1 shared_informer.go:247] Caches are synced for resource quota 
	I0813 00:23:58.659003       1 shared_informer.go:247] Caches are synced for garbage collector 
	I0813 00:23:58.659723       1 garbagecollector.go:151] Garbage collector: all resource monitors have synced. Proceeding to collect garbage
	I0813 00:23:58.662160       1 shared_informer.go:247] Caches are synced for garbage collector 
	
	* 
	* ==> kube-proxy [6204b21ab9c2c6e799da39874e9eb93e39284c65231dd05603088db2fa6b8e6b] <==
	* I0813 00:23:26.907583       1 node.go:172] Successfully retrieved node IP: 192.168.127.196
	I0813 00:23:26.907779       1 server_others.go:140] Detected node IP 192.168.127.196
	W0813 00:23:26.907836       1 server_others.go:598] Unknown proxy mode "", assuming iptables proxy
	W0813 00:23:27.211577       1 server_others.go:197] No iptables support for IPv6: exit status 3
	I0813 00:23:27.211713       1 server_others.go:208] kube-proxy running in single-stack IPv4 mode
	I0813 00:23:27.211745       1 server_others.go:212] Using iptables Proxier.
	I0813 00:23:27.212315       1 server.go:643] Version: v1.21.3
	I0813 00:23:27.215676       1 config.go:315] Starting service config controller
	I0813 00:23:27.215816       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0813 00:23:27.215853       1 config.go:224] Starting endpoint slice config controller
	I0813 00:23:27.215859       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	W0813 00:23:27.234006       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	W0813 00:23:27.237301       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	I0813 00:23:27.316802       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	I0813 00:23:27.316986       1 shared_informer.go:247] Caches are synced for service config 
	
	* 
	* ==> kube-proxy [c11b8a977685bc2516a3a180b7d7e5a078649d5b9c68db67af64bdbf0438193c] <==
	* I0813 00:22:08.526654       1 node.go:172] Successfully retrieved node IP: 192.168.127.196
	I0813 00:22:08.527015       1 server_others.go:140] Detected node IP 192.168.127.196
	W0813 00:22:08.527394       1 server_others.go:598] Unknown proxy mode "", assuming iptables proxy
	W0813 00:22:08.644892       1 server_others.go:197] No iptables support for IPv6: exit status 3
	I0813 00:22:08.644915       1 server_others.go:208] kube-proxy running in single-stack IPv4 mode
	I0813 00:22:08.644934       1 server_others.go:212] Using iptables Proxier.
	I0813 00:22:08.647847       1 server.go:643] Version: v1.21.3
	I0813 00:22:08.651170       1 config.go:315] Starting service config controller
	I0813 00:22:08.651751       1 shared_informer.go:240] Waiting for caches to sync for service config
	I0813 00:22:08.657258       1 config.go:224] Starting endpoint slice config controller
	I0813 00:22:08.657749       1 shared_informer.go:240] Waiting for caches to sync for endpoint slice config
	W0813 00:22:08.659509       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	W0813 00:22:08.695660       1 warnings.go:70] discovery.k8s.io/v1beta1 EndpointSlice is deprecated in v1.21+, unavailable in v1.25+; use discovery.k8s.io/v1 EndpointSlice
	I0813 00:22:08.752469       1 shared_informer.go:247] Caches are synced for service config 
	I0813 00:22:08.759025       1 shared_informer.go:247] Caches are synced for endpoint slice config 
	
	* 
	* ==> kube-scheduler [3afc8c09f828616463f8d4246cdb7a602c45569e04de078f3b507b5df49993e8] <==
	* E0813 00:21:42.747204       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.747540       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0813 00:21:42.748267       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.748530       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0813 00:21:42.749032       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0813 00:21:42.749616       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.747557       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0813 00:21:42.750598       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.CSIStorageCapacity: failed to list *v1beta1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:42.751707       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0813 00:21:43.586759       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:43.586851       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0813 00:21:43.611677       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0813 00:21:43.619757       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0813 00:21:43.758571       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1beta1.CSIStorageCapacity: failed to list *v1beta1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:43.808629       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0813 00:21:43.858463       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0813 00:21:43.874015       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0813 00:21:43.903878       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0813 00:21:43.932330       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0813 00:21:44.053954       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0813 00:21:44.184579       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:44.276152       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0813 00:21:44.276696       1 reflector.go:138] k8s.io/client-go/informers/factory.go:134: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0813 00:21:45.625656       1 reflector.go:138] k8s.io/apiserver/pkg/server/dynamiccertificates/configmap_cafile_content.go:206: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	I0813 00:21:51.218463       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
	
	* 
	* ==> kube-scheduler [64c935095bced983323337af866a47ac732cfe3496f8dfd31387b8833f7cc6c0] <==
	* I0813 00:23:17.339713       1 serving.go:347] Generated self-signed cert in-memory
	W0813 00:23:24.103838       1 requestheader_controller.go:193] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0813 00:23:24.103977       1 authentication.go:337] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0813 00:23:24.103989       1 authentication.go:338] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0813 00:23:24.103997       1 authentication.go:339] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0813 00:23:24.177616       1 secure_serving.go:197] Serving securely on 127.0.0.1:10259
	I0813 00:23:24.178246       1 tlsconfig.go:240] Starting DynamicServingCertificateController
	I0813 00:23:24.179347       1 configmap_cafile_content.go:202] Starting client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0813 00:23:24.196646       1 shared_informer.go:240] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0813 00:23:24.304977       1 shared_informer.go:247] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file 
	
	* 
	* ==> kubelet <==
	* -- Logs begin at Fri 2021-08-13 00:20:51 UTC, end at Fri 2021-08-13 00:24:14 UTC. --
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907271    5078 server.go:660] "--cgroups-per-qos enabled, but --cgroup-root was not specified.  defaulting to /"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907671    5078 container_manager_linux.go:278] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907807    5078 container_manager_linux.go:283] "Creating Container Manager object based on Node Config" nodeConfig={RuntimeCgroupsName: SystemCgroupsName: KubeletCgroupsName: ContainerRuntime:remote CgroupsPerQOS:true CgroupRoot:/ CgroupDriver:cgroupfs KubeletRootDir:/var/lib/kubelet ProtectKernelDefaults:false NodeAllocatableConfig:{KubeReservedCgroupName: SystemReservedCgroupName: ReservedSystemCPUs: EnforceNodeAllocatable:map[pods:{}] KubeReserved:map[] SystemReserved:map[] HardEvictionThresholds:[]} QOSReserved:map[] ExperimentalCPUManagerPolicy:none ExperimentalTopologyManagerScope:container ExperimentalCPUManagerReconcilePeriod:10s ExperimentalMemoryManagerPolicy:None ExperimentalMemoryManagerReservedMemory:[] ExperimentalPodPidsLimit:-1 EnforceCPULimits:true CPUCFSQuotaPeriod:100ms ExperimentalTopologyManagerPolicy:none}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907872    5078 topology_manager.go:120] "Creating topology manager with policy per scope" topologyPolicyName="none" topologyScopeName="container"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907885    5078 container_manager_linux.go:314] "Initializing Topology Manager" policy="none" scope="container"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.907893    5078 container_manager_linux.go:319] "Creating device plugin manager" devicePluginEnabled=true
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908017    5078 remote_runtime.go:62] parsed scheme: ""
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908027    5078 remote_runtime.go:62] scheme "" not registered, fallback to default scheme
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908151    5078 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{/run/containerd/containerd.sock  <nil> 0 <nil>}] <nil> <nil>}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908169    5078 clientconn.go:948] ClientConn switching balancer to "pick_first"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908324    5078 remote_image.go:50] parsed scheme: ""
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908333    5078 remote_image.go:50] scheme "" not registered, fallback to default scheme
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908348    5078 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{/run/containerd/containerd.sock  <nil> 0 <nil>}] <nil> <nil>}
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908359    5078 clientconn.go:948] ClientConn switching balancer to "pick_first"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908467    5078 kubelet.go:404] "Attempting to sync node with API server"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908488    5078 kubelet.go:272] "Adding static pod path" path="/etc/kubernetes/manifests"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908521    5078 kubelet.go:283] "Adding apiserver pod source"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.908555    5078 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.909189    5078 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
	Aug 13 00:24:06 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:06.913695    5078 kuberuntime_manager.go:222] "Container runtime initialized" containerRuntime="containerd" version="v1.4.9" apiVersion="v1alpha2"
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]: E0813 00:24:07.237758    5078 aws_credentials.go:77] while getting AWS credentials NoCredentialProviders: no valid providers in chain. Deprecated.
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]:         For verbose messaging see aws.Config.CredentialsChainVerboseErrors
	Aug 13 00:24:07 pause-20210813001951-679351 kubelet[5078]: I0813 00:24:07.244131    5078 server.go:1190] "Started kubelet"
	Aug 13 00:24:07 pause-20210813001951-679351 systemd[1]: kubelet.service: Succeeded.
	Aug 13 00:24:07 pause-20210813001951-679351 systemd[1]: Stopped kubelet: The Kubernetes Node Agent.
	
	* 
	* ==> storage-provisioner [7899222a89a71ffe4fb8d232a5b3799b5c94ea7e430406ce44654a4c80946ed9] <==
	* I0813 00:23:30.839269       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0813 00:23:30.879440       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0813 00:23:30.880848       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0813 00:23:30.925896       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0813 00:23:30.926952       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99!
	I0813 00:23:30.936151       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"44c00147-d6a4-4a55-bec0-85ce7cb56602", APIVersion:"v1", ResourceVersion:"544", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99 became leader
	I0813 00:23:31.046717       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_pause-20210813001951-679351_90bbf71c-5385-47c0-853d-3e2fde5ecc99!
	

-- /stdout --
helpers_test.go:255: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p pause-20210813001951-679351 -n pause-20210813001951-679351
helpers_test.go:255: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p pause-20210813001951-679351 -n pause-20210813001951-679351: exit status 2 (294.338092ms)

-- stdout --
	Running

-- /stdout --
helpers_test.go:255: status error: exit status 2 (may be ok)
helpers_test.go:262: (dbg) Run:  kubectl --context pause-20210813001951-679351 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:271: non-running pods: 
helpers_test.go:273: ======> post-mortem[TestPause/serial/PauseAgain]: describe non-running pods <======
helpers_test.go:276: (dbg) Run:  kubectl --context pause-20210813001951-679351 describe pod 
helpers_test.go:276: (dbg) Non-zero exit: kubectl --context pause-20210813001951-679351 describe pod : exit status 1 (61.416172ms)

** stderr ** 
	error: resource name may not be empty

** /stderr **
helpers_test.go:278: kubectl --context pause-20210813001951-679351 describe pod : exit status 1
--- FAIL: TestPause/serial/PauseAgain (10.84s)

TestNetworkPlugins/group/calico/Start (580.22s)

=== RUN   TestNetworkPlugins/group/calico/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p calico-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=calico --driver=kvm2  --container-runtime=containerd

=== CONT  TestNetworkPlugins/group/calico/Start
net_test.go:98: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p calico-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=calico --driver=kvm2  --container-runtime=containerd: exit status 80 (9m40.192468484s)

-- stdout --
	* [calico-20210813002105-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	* Using the kvm2 driver based on user configuration
	* Starting control plane node calico-20210813002105-679351 in cluster calico-20210813002105-679351
	* Creating kvm2 VM (CPUs=2, Memory=2048MB, Disk=20000MB) ...
	* Preparing Kubernetes v1.21.3 on containerd 1.4.9 ...
	  - Generating certificates and keys ...
	  - Booting up control plane ...
	  - Configuring RBAC rules ...
	* Configuring Calico (Container Networking Interface) ...
	* Verifying Kubernetes components...
	  - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	* Enabled addons: default-storageclass, storage-provisioner
	
	

-- /stdout --
** stderr ** 
	I0813 00:25:50.080953  718671 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:25:50.081050  718671 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:25:50.081063  718671 out.go:311] Setting ErrFile to fd 2...
	I0813 00:25:50.081068  718671 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:25:50.081197  718671 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:25:50.081586  718671 out.go:305] Setting JSON to false
	I0813 00:25:50.120191  718671 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":14913,"bootTime":1628799437,"procs":198,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0813 00:25:50.120319  718671 start.go:121] virtualization: kvm guest
	I0813 00:25:50.123033  718671 out.go:177] * [calico-20210813002105-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	I0813 00:25:50.124526  718671 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:25:50.123215  718671 notify.go:169] Checking for updates...
	I0813 00:25:50.125967  718671 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0813 00:25:50.127401  718671 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:25:50.128815  718671 out.go:177]   - MINIKUBE_LOCATION=12230
	I0813 00:25:50.129537  718671 driver.go:335] Setting default libvirt URI to qemu:///system
	I0813 00:25:50.165302  718671 out.go:177] * Using the kvm2 driver based on user configuration
	I0813 00:25:50.165330  718671 start.go:278] selected driver: kvm2
	I0813 00:25:50.165338  718671 start.go:751] validating driver "kvm2" against <nil>
	I0813 00:25:50.165359  718671 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0813 00:25:50.167786  718671 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0813 00:25:50.167966  718671 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0813 00:25:50.183063  718671 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0813 00:25:50.183126  718671 start_flags.go:263] no existing cluster config was found, will generate one from the flags 
	I0813 00:25:50.183303  718671 start_flags.go:697] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0813 00:25:50.183334  718671 cni.go:93] Creating CNI manager for "calico"
	I0813 00:25:50.183342  718671 start_flags.go:272] Found "Calico" CNI - setting NetworkPlugin=cni
	I0813 00:25:50.183353  718671 start_flags.go:277] config:
	{Name:calico-20210813002105-679351 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2048 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:calico-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:calico NodeIP: NodePort:8443 NodeName:} Nodes:[] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:5m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0813 00:25:50.183473  718671 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0813 00:25:50.185477  718671 out.go:177] * Starting control plane node calico-20210813002105-679351 in cluster calico-20210813002105-679351
	I0813 00:25:50.185533  718671 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0813 00:25:50.185565  718671 preload.go:147] Found local preload: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4
	I0813 00:25:50.185593  718671 cache.go:56] Caching tarball of preloaded images
	I0813 00:25:50.185731  718671 preload.go:173] Found /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4 in cache, skipping download
	I0813 00:25:50.185755  718671 cache.go:59] Finished verifying existence of preloaded tar for  v1.21.3 on containerd
	I0813 00:25:50.185924  718671 profile.go:148] Saving config to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/config.json ...
	I0813 00:25:50.185957  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/config.json: {Name:mkab763c36498af1261c0bc1109ea8b6affd68c6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:25:50.186122  718671 cache.go:205] Successfully downloaded all kic artifacts
	I0813 00:25:50.186152  718671 start.go:313] acquiring machines lock for calico-20210813002105-679351: {Name:mk522658ca6319f8a1c60d46c1e97d60752e8eaa Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0813 00:26:06.721906  718671 start.go:317] acquired machines lock for "calico-20210813002105-679351" in 16.535733991s
	I0813 00:26:06.721957  718671 start.go:89] Provisioning new machine with config: &{Name:calico-20210813002105-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2048 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:calico-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:calico NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:5m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0} &{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}
	I0813 00:26:06.722062  718671 start.go:126] createHost starting for "" (driver="kvm2")
	I0813 00:26:06.724532  718671 out.go:204] * Creating kvm2 VM (CPUs=2, Memory=2048MB, Disk=20000MB) ...
	I0813 00:26:06.724758  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:26:06.724811  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:26:06.739181  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:35771
	I0813 00:26:06.739604  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:26:06.740189  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:26:06.740217  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:26:06.740515  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:26:06.740708  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetMachineName
	I0813 00:26:06.740839  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:06.740977  718671 start.go:160] libmachine.API.Create for "calico-20210813002105-679351" (driver="kvm2")
	I0813 00:26:06.741015  718671 client.go:168] LocalClient.Create starting
	I0813 00:26:06.741056  718671 main.go:130] libmachine: Reading certificate data from /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem
	I0813 00:26:06.741089  718671 main.go:130] libmachine: Decoding PEM data...
	I0813 00:26:06.741112  718671 main.go:130] libmachine: Parsing certificate...
	I0813 00:26:06.741258  718671 main.go:130] libmachine: Reading certificate data from /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/cert.pem
	I0813 00:26:06.741289  718671 main.go:130] libmachine: Decoding PEM data...
	I0813 00:26:06.741310  718671 main.go:130] libmachine: Parsing certificate...
	I0813 00:26:06.741371  718671 main.go:130] libmachine: Running pre-create checks...
	I0813 00:26:06.741387  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .PreCreateCheck
	I0813 00:26:06.741753  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetConfigRaw
	I0813 00:26:06.742215  718671 main.go:130] libmachine: Creating machine...
	I0813 00:26:06.742237  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Create
	I0813 00:26:06.742365  718671 main.go:130] libmachine: (calico-20210813002105-679351) Creating KVM machine...
	I0813 00:26:06.745091  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found existing default KVM network
	I0813 00:26:06.747078  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.746913  718922 network.go:240] skipping subnet 192.168.39.0/24 that is taken: &{IP:192.168.39.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.39.0/24 Gateway:192.168.39.1 ClientMin:192.168.39.2 ClientMax:192.168.39.254 Broadcast:192.168.39.255 Interface:{IfaceName:virbr6 IfaceIPv4:192.168.39.1 IfaceMTU:1500 IfaceMAC:52:54:00:c9:12:3a}}
	I0813 00:26:06.749680  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.749586  718922 network.go:240] skipping subnet 192.168.50.0/24 that is taken: &{IP:192.168.50.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.50.0/24 Gateway:192.168.50.1 ClientMin:192.168.50.2 ClientMax:192.168.50.254 Broadcast:192.168.50.255 Interface:{IfaceName:virbr5 IfaceIPv4:192.168.50.1 IfaceMTU:1500 IfaceMAC:52:54:00:e7:55:97}}
	I0813 00:26:06.751300  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.751229  718922 network.go:240] skipping subnet 192.168.61.0/24 that is taken: &{IP:192.168.61.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.61.0/24 Gateway:192.168.61.1 ClientMin:192.168.61.2 ClientMax:192.168.61.254 Broadcast:192.168.61.255 Interface:{IfaceName:virbr3 IfaceIPv4:192.168.61.1 IfaceMTU:1500 IfaceMAC:52:54:00:ef:5a:89}}
	I0813 00:26:06.755517  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.755409  718922 network.go:240] skipping subnet 192.168.72.0/24 that is taken: &{IP:192.168.72.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.72.0/24 Gateway:192.168.72.1 ClientMin:192.168.72.2 ClientMax:192.168.72.254 Broadcast:192.168.72.255 Interface:{IfaceName:virbr4 IfaceIPv4:192.168.72.1 IfaceMTU:1500 IfaceMAC:52:54:00:3b:67:48}}
	I0813 00:26:06.756784  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.756694  718922 network.go:240] skipping subnet 192.168.83.0/24 that is taken: &{IP:192.168.83.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.83.0/24 Gateway:192.168.83.1 ClientMin:192.168.83.2 ClientMax:192.168.83.254 Broadcast:192.168.83.255 Interface:{IfaceName:virbr9 IfaceIPv4:192.168.83.1 IfaceMTU:1500 IfaceMAC:52:54:00:2a:df:7d}}
	I0813 00:26:06.758641  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.758555  718922 network.go:240] skipping subnet 192.168.94.0/24 that is taken: &{IP:192.168.94.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.94.0/24 Gateway:192.168.94.1 ClientMin:192.168.94.2 ClientMax:192.168.94.254 Broadcast:192.168.94.255 Interface:{IfaceName:virbr10 IfaceIPv4:192.168.94.1 IfaceMTU:1500 IfaceMAC:52:54:00:56:7f:e8}}
	I0813 00:26:06.760295  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.760199  718922 network.go:240] skipping subnet 192.168.105.0/24 that is taken: &{IP:192.168.105.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.105.0/24 Gateway:192.168.105.1 ClientMin:192.168.105.2 ClientMax:192.168.105.254 Broadcast:192.168.105.255 Interface:{IfaceName:virbr7 IfaceIPv4:192.168.105.1 IfaceMTU:1500 IfaceMAC:52:54:00:46:ee:b2}}
	I0813 00:26:06.762482  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.762383  718922 network.go:240] skipping subnet 192.168.116.0/24 that is taken: &{IP:192.168.116.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.116.0/24 Gateway:192.168.116.1 ClientMin:192.168.116.2 ClientMax:192.168.116.254 Broadcast:192.168.116.255 Interface:{IfaceName:virbr8 IfaceIPv4:192.168.116.1 IfaceMTU:1500 IfaceMAC:52:54:00:79:1a:dc}}
	I0813 00:26:06.764399  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.764305  718922 network.go:240] skipping subnet 192.168.127.0/24 that is taken: &{IP:192.168.127.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.127.0/24 Gateway:192.168.127.1 ClientMin:192.168.127.2 ClientMax:192.168.127.254 Broadcast:192.168.127.255 Interface:{IfaceName:virbr11 IfaceIPv4:192.168.127.1 IfaceMTU:1500 IfaceMAC:52:54:00:f1:c1:ab}}
	I0813 00:26:06.765571  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.765470  718922 network.go:240] skipping subnet 192.168.138.0/24 that is taken: &{IP:192.168.138.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.138.0/24 Gateway:192.168.138.1 ClientMin:192.168.138.2 ClientMax:192.168.138.254 Broadcast:192.168.138.255 Interface:{IfaceName:virbr12 IfaceIPv4:192.168.138.1 IfaceMTU:1500 IfaceMAC:52:54:00:ea:8d:f9}}
	I0813 00:26:06.766436  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.766348  718922 network.go:240] skipping subnet 192.168.149.0/24 that is taken: &{IP:192.168.149.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.149.0/24 Gateway:192.168.149.1 ClientMin:192.168.149.2 ClientMax:192.168.149.254 Broadcast:192.168.149.255 Interface:{IfaceName:virbr13 IfaceIPv4:192.168.149.1 IfaceMTU:1500 IfaceMAC:52:54:00:35:d3:8b}}
	I0813 00:26:06.768972  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.768863  718922 network.go:288] reserving subnet 192.168.160.0 for 1m0s: &{mu:{state:0 sema:0} read:{v:{m:map[] amended:true}} dirty:map[192.168.160.0:0xc000208018] misses:0}
	I0813 00:26:06.769006  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:06.768911  718922 network.go:235] using free private subnet 192.168.160.0/24: &{IP:192.168.160.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.160.0/24 Gateway:192.168.160.1 ClientMin:192.168.160.2 ClientMax:192.168.160.254 Broadcast:192.168.160.255 Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:}}
	I0813 00:26:06.800059  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | trying to create private KVM network mk-calico-20210813002105-679351 192.168.160.0/24...
	I0813 00:26:07.089994  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | private KVM network mk-calico-20210813002105-679351 192.168.160.0/24 created
	I0813 00:26:07.090040  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:07.089932  718922 common.go:108] Making disk image using store path: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:26:07.090059  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting up store path in /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351 ...
	I0813 00:26:07.090090  718671 main.go:130] libmachine: (calico-20210813002105-679351) Building disk image from file:///home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/iso/minikube-v1.22.0-1628238775-12122.iso
	I0813 00:26:07.090121  718671 main.go:130] libmachine: (calico-20210813002105-679351) Downloading /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/boot2docker.iso from file:///home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/iso/minikube-v1.22.0-1628238775-12122.iso...
	I0813 00:26:07.290559  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:07.290335  718922 common.go:115] Creating ssh key: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa...
	I0813 00:26:07.551604  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:07.551464  718922 common.go:121] Creating raw disk image: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/calico-20210813002105-679351.rawdisk...
	I0813 00:26:07.551640  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Writing magic tar header
	I0813 00:26:07.551666  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Writing SSH key tar header
	I0813 00:26:07.551680  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:07.551579  718922 common.go:135] Fixing permissions on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351 ...
	I0813 00:26:07.551702  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351
	I0813 00:26:07.551740  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines
	I0813 00:26:07.551768  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:26:07.551787  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b
	I0813 00:26:07.551809  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351 (perms=drwx------)
	I0813 00:26:07.551833  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins/minikube-integration
	I0813 00:26:07.551856  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home/jenkins
	I0813 00:26:07.551877  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines (perms=drwxr-xr-x)
	I0813 00:26:07.551909  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube (perms=drwxr-xr-x)
	I0813 00:26:07.551924  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b (perms=drwxr-xr-x)
	I0813 00:26:07.551933  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Checking permissions on dir: /home
	I0813 00:26:07.551942  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Skipping /home - not owner
	I0813 00:26:07.551961  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins/minikube-integration (perms=drwxr-xr-x)
	I0813 00:26:07.551975  718671 main.go:130] libmachine: (calico-20210813002105-679351) Setting executable bit set on /home/jenkins (perms=drwxr-xr-x)
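The permission pass above works bottom-up: starting at the machine directory, the driver checks each ancestor, sets the owner's executable (search) bit on directories it owns, and skips ones it does not (hence "Skipping /home - not owner"). A minimal Go sketch of that walk, assuming a Linux host; ensureSearchable is an illustrative name, not minikube's actual code:

package main

import (
	"fmt"
	"os"
	"os/user"
	"path/filepath"
	"strconv"
	"syscall"
)

// ensureSearchable walks from dir up toward the root, giving each
// directory the current user owns the owner-executable (search) bit,
// and skipping directories owned by someone else.
func ensureSearchable(dir string) error {
	me, err := user.Current()
	if err != nil {
		return err
	}
	for d := dir; ; d = filepath.Dir(d) {
		info, err := os.Stat(d)
		if err != nil {
			return err
		}
		st := info.Sys().(*syscall.Stat_t) // Linux-specific stat
		if strconv.FormatUint(uint64(st.Uid), 10) != me.Uid {
			fmt.Printf("Skipping %s - not owner\n", d)
		} else if info.Mode().Perm()&0o100 == 0 {
			if err := os.Chmod(d, info.Mode().Perm()|0o100); err != nil {
				return err
			}
		}
		if parent := filepath.Dir(d); parent == d {
			return nil // reached "/"
		}
	}
}

func main() {
	if err := ensureSearchable(os.Getenv("HOME")); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}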
	I0813 00:26:07.552009  718671 main.go:130] libmachine: (calico-20210813002105-679351) Creating domain...
	I0813 00:26:07.579205  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:04:3e:0f in network default
	I0813 00:26:07.579732  718671 main.go:130] libmachine: (calico-20210813002105-679351) Ensuring networks are active...
	I0813 00:26:07.579757  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:07.581670  718671 main.go:130] libmachine: (calico-20210813002105-679351) Ensuring network default is active
	I0813 00:26:07.582012  718671 main.go:130] libmachine: (calico-20210813002105-679351) Ensuring network mk-calico-20210813002105-679351 is active
	I0813 00:26:07.582604  718671 main.go:130] libmachine: (calico-20210813002105-679351) Getting domain xml...
	I0813 00:26:07.584374  718671 main.go:130] libmachine: (calico-20210813002105-679351) Creating domain...
	I0813 00:26:07.965469  718671 main.go:130] libmachine: (calico-20210813002105-679351) Waiting to get IP...
	I0813 00:26:07.966429  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:07.966927  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:07.966982  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:07.966887  718922 retry.go:31] will retry after 263.082536ms: waiting for machine to come up
	I0813 00:26:08.231185  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:08.231722  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:08.231762  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:08.231700  718922 retry.go:31] will retry after 381.329545ms: waiting for machine to come up
	I0813 00:26:08.614165  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:08.614641  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:08.614668  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:08.614594  718922 retry.go:31] will retry after 422.765636ms: waiting for machine to come up
	I0813 00:26:09.039081  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:09.039582  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:09.039610  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:09.039537  718922 retry.go:31] will retry after 473.074753ms: waiting for machine to come up
	I0813 00:26:09.514072  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:09.514511  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:09.514542  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:09.514453  718922 retry.go:31] will retry after 587.352751ms: waiting for machine to come up
	I0813 00:26:10.102980  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:10.103611  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:10.103651  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:10.103513  718922 retry.go:31] will retry after 834.206799ms: waiting for machine to come up
	I0813 00:26:10.939046  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:10.939730  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:10.939770  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:10.939670  718922 retry.go:31] will retry after 746.553905ms: waiting for machine to come up
	I0813 00:26:11.688148  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:11.688783  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:11.688822  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:11.688740  718922 retry.go:31] will retry after 987.362415ms: waiting for machine to come up
	I0813 00:26:12.677749  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:12.678324  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:12.678355  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:12.678262  718922 retry.go:31] will retry after 1.189835008s: waiting for machine to come up
	I0813 00:26:13.869619  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:13.870163  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:13.870200  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:13.870111  718922 retry.go:31] will retry after 1.677229867s: waiting for machine to come up
	I0813 00:26:15.549970  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:15.550476  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:15.550509  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:15.550425  718922 retry.go:31] will retry after 2.346016261s: waiting for machine to come up
	I0813 00:26:17.898262  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:17.898787  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:17.898907  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:17.898733  718922 retry.go:31] will retry after 3.36678925s: waiting for machine to come up
	I0813 00:26:21.267786  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:21.268376  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find current IP address of domain calico-20210813002105-679351 in network mk-calico-20210813002105-679351
	I0813 00:26:21.268410  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | I0813 00:26:21.268307  718922 retry.go:31] will retry after 3.11822781s: waiting for machine to come up
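Each "will retry after ..." above waits a little longer than the last attempt; the durations (263ms, 381ms, ... 3.37s) are a growing backoff with jitter, bounded by an overall deadline. A sketch of the pattern under assumed names (the real loop lives in minikube's retry.go):

package main

import (
	"errors"
	"fmt"
	"math/rand"
	"time"
)

// retryWithBackoff keeps calling fn until it succeeds or the deadline
// passes, sleeping a randomly jittered, growing interval between tries.
func retryWithBackoff(fn func() error, deadline time.Duration) error {
	start := time.Now()
	wait := 250 * time.Millisecond
	for time.Since(start) < deadline {
		if err := fn(); err == nil {
			return nil
		}
		// jitter: sleep 0.5x-1.5x of the nominal wait, then grow it
		sleep := time.Duration(float64(wait) * (0.5 + rand.Float64()))
		fmt.Printf("will retry after %v: waiting for machine to come up\n", sleep)
		time.Sleep(sleep)
		wait = wait * 3 / 2
	}
	return errors.New("timed out waiting for machine IP")
}

func main() {
	tries := 0
	if err := retryWithBackoff(func() error {
		tries++
		if tries < 4 {
			return errors.New("no lease yet")
		}
		return nil
	}, time.Minute); err != nil {
		fmt.Println(err)
	}
}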
	I0813 00:26:24.389580  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:24.390222  718671 main.go:130] libmachine: (calico-20210813002105-679351) Found IP for machine: 192.168.160.169
	I0813 00:26:24.390252  718671 main.go:130] libmachine: (calico-20210813002105-679351) Reserving static IP address...
	I0813 00:26:24.390282  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has current primary IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:24.390541  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | unable to find host DHCP lease matching {name: "calico-20210813002105-679351", mac: "52:54:00:51:22:1f", ip: "192.168.160.169"} in network mk-calico-20210813002105-679351
	I0813 00:26:25.693882  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Getting to WaitForSSH function...
	I0813 00:26:25.693927  718671 main.go:130] libmachine: (calico-20210813002105-679351) Reserved static IP address: 192.168.160.169
	I0813 00:26:25.693938  718671 main.go:130] libmachine: (calico-20210813002105-679351) Waiting for SSH to be available...
	I0813 00:26:25.700342  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:25.700704  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:minikube Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:25.700736  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:25.700876  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Using SSH client type: external
	I0813 00:26:25.700917  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Using SSH private key: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa (-rw-------)
	I0813 00:26:25.700972  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | &{[-F /dev/null -o ConnectionAttempts=3 -o ConnectTimeout=10 -o ControlMaster=no -o ControlPath=none -o LogLevel=quiet -o PasswordAuthentication=no -o ServerAliveInterval=60 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null docker@192.168.160.169 -o IdentitiesOnly=yes -i /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa -p 22] /usr/bin/ssh <nil>}
	I0813 00:26:25.701006  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | About to run SSH command:
	I0813 00:26:25.701019  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | exit 0
	I0813 00:26:25.846878  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | SSH cmd err, output: <nil>: 
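The empty "SSH cmd err, output: <nil>" marks the success of the "exit 0" probe: the driver shells out to the system ssh client with host-key checking disabled and retries the no-op command until it exits cleanly. A hedged sketch of one probe attempt, reusing the flags from the log (sshReady is a made-up helper):

package main

import (
	"fmt"
	"os/exec"
)

// sshReady runs `exit 0` on the guest via the external ssh client.
// A nil error means the server accepted the key and ran the command.
func sshReady(ip, keyPath string) error {
	args := []string{
		"-F", "/dev/null",
		"-o", "ConnectionAttempts=3",
		"-o", "ConnectTimeout=10",
		"-o", "PasswordAuthentication=no",
		"-o", "StrictHostKeyChecking=no",
		"-o", "UserKnownHostsFile=/dev/null",
		"-o", "IdentitiesOnly=yes",
		"-i", keyPath,
		"-p", "22",
		"docker@" + ip,
		"exit 0",
	}
	return exec.Command("/usr/bin/ssh", args...).Run()
}

func main() {
	// Illustrative values; the real key path comes from the machine store.
	err := sshReady("192.168.160.169", "/home/user/.minikube/machines/demo/id_rsa")
	fmt.Println("ssh ready:", err == nil)
}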
	I0813 00:26:25.847436  718671 main.go:130] libmachine: (calico-20210813002105-679351) KVM machine creation complete!
	I0813 00:26:25.847630  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetConfigRaw
	I0813 00:26:25.848409  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:25.848690  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:25.848882  718671 main.go:130] libmachine: Waiting for machine to be running, this may take a few minutes...
	I0813 00:26:25.848911  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetState
	I0813 00:26:25.851949  718671 main.go:130] libmachine: Detecting operating system of created instance...
	I0813 00:26:25.851980  718671 main.go:130] libmachine: Waiting for SSH to be available...
	I0813 00:26:25.851997  718671 main.go:130] libmachine: Getting to WaitForSSH function...
	I0813 00:26:25.852011  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:25.858333  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:25.858826  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:25.858850  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:25.859110  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:25.859294  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:25.859477  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:25.859640  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:25.859855  718671 main.go:130] libmachine: Using SSH client type: native
	I0813 00:26:25.860085  718671 main.go:130] libmachine: &{{{<nil> 0 [] [] []} docker [0x802ea0] 0x802e60 <nil>  [] 0s} 192.168.160.169 22 <nil> <nil>}
	I0813 00:26:25.860110  718671 main.go:130] libmachine: About to run SSH command:
	exit 0
	I0813 00:26:26.033003  718671 main.go:130] libmachine: SSH cmd err, output: <nil>: 
	I0813 00:26:26.033037  718671 main.go:130] libmachine: Detecting the provisioner...
	I0813 00:26:26.033048  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.038820  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.039243  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.039290  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.039393  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.039600  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.039786  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.039932  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.040081  718671 main.go:130] libmachine: Using SSH client type: native
	I0813 00:26:26.040224  718671 main.go:130] libmachine: &{{{<nil> 0 [] [] []} docker [0x802ea0] 0x802e60 <nil>  [] 0s} 192.168.160.169 22 <nil> <nil>}
	I0813 00:26:26.040234  718671 main.go:130] libmachine: About to run SSH command:
	cat /etc/os-release
	I0813 00:26:26.175348  718671 main.go:130] libmachine: SSH cmd err, output: <nil>: NAME=Buildroot
	VERSION=2020.02.12
	ID=buildroot
	VERSION_ID=2020.02.12
	PRETTY_NAME="Buildroot 2020.02.12"
	
	I0813 00:26:26.175443  718671 main.go:130] libmachine: found compatible host: buildroot
	I0813 00:26:26.175457  718671 main.go:130] libmachine: Provisioning with buildroot...
	I0813 00:26:26.175470  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetMachineName
	I0813 00:26:26.175764  718671 buildroot.go:166] provisioning hostname "calico-20210813002105-679351"
	I0813 00:26:26.175795  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetMachineName
	I0813 00:26:26.175994  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.181483  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.181932  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.181992  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.182075  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.182278  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.182430  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.182599  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.182760  718671 main.go:130] libmachine: Using SSH client type: native
	I0813 00:26:26.182937  718671 main.go:130] libmachine: &{{{<nil> 0 [] [] []} docker [0x802ea0] 0x802e60 <nil>  [] 0s} 192.168.160.169 22 <nil> <nil>}
	I0813 00:26:26.182956  718671 main.go:130] libmachine: About to run SSH command:
	sudo hostname calico-20210813002105-679351 && echo "calico-20210813002105-679351" | sudo tee /etc/hostname
	I0813 00:26:26.326697  718671 main.go:130] libmachine: SSH cmd err, output: <nil>: calico-20210813002105-679351
	
	I0813 00:26:26.326732  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.332811  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.333188  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.333219  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.333425  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.333633  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.333823  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.333960  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.334155  718671 main.go:130] libmachine: Using SSH client type: native
	I0813 00:26:26.334410  718671 main.go:130] libmachine: &{{{<nil> 0 [] [] []} docker [0x802ea0] 0x802e60 <nil>  [] 0s} 192.168.160.169 22 <nil> <nil>}
	I0813 00:26:26.334436  718671 main.go:130] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\scalico-20210813002105-679351' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 calico-20210813002105-679351/g' /etc/hosts;
				else 
					echo '127.0.1.1 calico-20210813002105-679351' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0813 00:26:26.477198  718671 main.go:130] libmachine: SSH cmd err, output: <nil>: 
	I0813 00:26:26.477233  718671 buildroot.go:172] set auth options {CertDir:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube CaCertPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube}
	I0813 00:26:26.477293  718671 buildroot.go:174] setting up certificates
	I0813 00:26:26.477306  718671 provision.go:83] configureAuth start
	I0813 00:26:26.477324  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetMachineName
	I0813 00:26:26.477646  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetIP
	I0813 00:26:26.483774  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.484098  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.484132  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.484356  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.488978  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.489307  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.489342  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.489454  718671 provision.go:137] copyHostCerts
	I0813 00:26:26.489560  718671 exec_runner.go:145] found /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.pem, removing ...
	I0813 00:26:26.489575  718671 exec_runner.go:190] rm: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.pem
	I0813 00:26:26.489630  718671 exec_runner.go:152] cp: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.pem (1082 bytes)
	I0813 00:26:26.489714  718671 exec_runner.go:145] found /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cert.pem, removing ...
	I0813 00:26:26.489725  718671 exec_runner.go:190] rm: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cert.pem
	I0813 00:26:26.489744  718671 exec_runner.go:152] cp: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cert.pem (1123 bytes)
	I0813 00:26:26.489789  718671 exec_runner.go:145] found /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/key.pem, removing ...
	I0813 00:26:26.489796  718671 exec_runner.go:190] rm: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/key.pem
	I0813 00:26:26.489811  718671 exec_runner.go:152] cp: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/key.pem (1675 bytes)
	I0813 00:26:26.489888  718671 provision.go:111] generating server cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca-key.pem org=jenkins.calico-20210813002105-679351 san=[192.168.160.169 192.168.160.169 localhost 127.0.0.1 minikube calico-20210813002105-679351]
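The "generating server cert" step mints a server certificate signed by the minikube CA, with every address the machine answers to (IP, localhost, hostname) packed into the subject alternative names. A self-contained crypto/x509 sketch of the same idea, generating a throwaway CA in-process where minikube would load ca.pem and ca-key.pem from disk (error handling elided for brevity):

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"net"
	"time"
)

func main() {
	// Throwaway CA; minikube loads this pair from ca.pem / ca-key.pem.
	caKey, _ := rsa.GenerateKey(rand.Reader, 2048)
	caTmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().AddDate(10, 0, 0),
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign,
		BasicConstraintsValid: true,
	}
	caDER, _ := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
	caCert, _ := x509.ParseCertificate(caDER)

	// Server cert: SANs list every name/IP the API server may be dialed by.
	srvKey, _ := rsa.GenerateKey(rand.Reader, 2048)
	srvTmpl := &x509.Certificate{
		SerialNumber: big.NewInt(2),
		Subject:      pkix.Name{Organization: []string{"jenkins.calico-20210813002105-679351"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().AddDate(1, 0, 0),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		IPAddresses:  []net.IP{net.ParseIP("192.168.160.169"), net.ParseIP("127.0.0.1")},
		DNSNames:     []string{"localhost", "minikube", "calico-20210813002105-679351"},
	}
	srvDER, _ := x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey)
	fmt.Printf("issued server cert, %d DER bytes, SANs: %v + %v\n",
		len(srvDER), srvTmpl.DNSNames, srvTmpl.IPAddresses)
}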
	I0813 00:26:26.547667  718671 provision.go:171] copyRemoteCerts
	I0813 00:26:26.547730  718671 ssh_runner.go:149] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0813 00:26:26.547763  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.553433  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.553843  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.553879  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.554105  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.554326  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.554499  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.554698  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:26:26.648852  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0813 00:26:26.667897  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/server.pem --> /etc/docker/server.pem (1257 bytes)
	I0813 00:26:26.689714  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0813 00:26:26.708549  718671 provision.go:86] duration metric: configureAuth took 231.223512ms
	I0813 00:26:26.708574  718671 buildroot.go:189] setting minikube options for container-runtime
	I0813 00:26:26.708756  718671 main.go:130] libmachine: Checking connection to Docker...
	I0813 00:26:26.708774  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetURL
	I0813 00:26:26.711444  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Using libvirt version 3000000
	I0813 00:26:26.716490  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.716850  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.716879  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.717055  718671 main.go:130] libmachine: Docker is up and running!
	I0813 00:26:26.717067  718671 main.go:130] libmachine: Reticulating splines...
	I0813 00:26:26.717074  718671 client.go:171] LocalClient.Create took 19.976047116s
	I0813 00:26:26.717098  718671 start.go:168] duration metric: libmachine.API.Create for "calico-20210813002105-679351" took 19.976119528s
	I0813 00:26:26.717112  718671 start.go:267] post-start starting for "calico-20210813002105-679351" (driver="kvm2")
	I0813 00:26:26.717120  718671 start.go:277] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0813 00:26:26.717141  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:26.717374  718671 ssh_runner.go:149] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0813 00:26:26.717404  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.722317  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.722650  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.722678  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.722837  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.723006  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.723171  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.723316  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:26:26.813029  718671 ssh_runner.go:149] Run: cat /etc/os-release
	I0813 00:26:26.817503  718671 info.go:137] Remote host: Buildroot 2020.02.12
	I0813 00:26:26.817545  718671 filesync.go:126] Scanning /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/addons for local assets ...
	I0813 00:26:26.817612  718671 filesync.go:126] Scanning /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files for local assets ...
	I0813 00:26:26.817710  718671 filesync.go:149] local asset: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/ssl/certs/6793512.pem -> 6793512.pem in /etc/ssl/certs
	I0813 00:26:26.817823  718671 ssh_runner.go:149] Run: sudo mkdir -p /etc/ssl/certs
	I0813 00:26:26.823909  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/ssl/certs/6793512.pem --> /etc/ssl/certs/6793512.pem (1708 bytes)
	I0813 00:26:26.840872  718671 start.go:270] post-start completed in 123.743305ms
	I0813 00:26:26.840940  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetConfigRaw
	I0813 00:26:26.841552  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetIP
	I0813 00:26:26.847018  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.847396  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.847426  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.847693  718671 profile.go:148] Saving config to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/config.json ...
	I0813 00:26:26.847910  718671 start.go:129] duration metric: createHost completed in 20.125836511s
	I0813 00:26:26.847927  718671 start.go:80] releasing machines lock for "calico-20210813002105-679351", held for 20.125992516s
	I0813 00:26:26.847981  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:26.848193  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetIP
	I0813 00:26:26.853497  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.853931  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.853965  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.854096  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:26.854300  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:26.854801  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:26:26.855069  718671 ssh_runner.go:149] Run: systemctl --version
	I0813 00:26:26.855083  718671 ssh_runner.go:149] Run: curl -sS -m 2 https://k8s.gcr.io/
	I0813 00:26:26.855093  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.855122  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:26:26.861648  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.861676  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.862022  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.862061  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:26.862099  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.862119  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:26.862270  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.862388  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:26:26.862483  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.862512  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:26:26.862653  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.862666  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:26:26.862794  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:26:26.862842  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:26:26.955665  718671 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0813 00:26:26.955784  718671 ssh_runner.go:149] Run: sudo crictl images --output json
	I0813 00:26:30.977416  718671 ssh_runner.go:189] Completed: sudo crictl images --output json: (4.021607527s)
	I0813 00:26:30.977565  718671 containerd.go:609] couldn't find preloaded image for "k8s.gcr.io/kube-apiserver:v1.21.3". assuming images are not preloaded.
	I0813 00:26:30.977636  718671 ssh_runner.go:149] Run: which lz4
	I0813 00:26:30.982072  718671 ssh_runner.go:149] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0813 00:26:30.986848  718671 ssh_runner.go:306] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot stat '/preloaded.tar.lz4': No such file or directory
	I0813 00:26:30.986880  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (928970367 bytes)
	I0813 00:26:35.177672  718671 containerd.go:546] Took 4.195636 seconds to copy over tarball
	I0813 00:26:35.177767  718671 ssh_runner.go:149] Run: sudo tar -I lz4 -C /var -xf /preloaded.tar.lz4
	I0813 00:26:45.429372  718671 ssh_runner.go:189] Completed: sudo tar -I lz4 -C /var -xf /preloaded.tar.lz4: (10.251565964s)
	I0813 00:26:45.922669  718671 containerd.go:553] Took 10.744947 seconds to extract the tarball
	I0813 00:26:45.922692  718671 ssh_runner.go:100] rm: /preloaded.tar.lz4
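The preload sequence above is guard-then-extract: stat the tarball on the guest, transfer it only if the stat fails, unpack it into /var with lz4, then remove it. A sketch of that flow behind a small runner interface; the interface and names stand in for minikube's ssh_runner and are not its real API:

package main

import "fmt"

// runner abstracts "run this command on the guest" plus file transfer.
type runner interface {
	Run(cmd string) error
	Copy(local, remote string) error
}

// ensurePreload mirrors the log: probe for the tarball, transfer it only
// when missing, unpack it into /var with lz4, then clean up.
func ensurePreload(r runner, local string) error {
	if err := r.Run(`stat -c "%s %y" /preloaded.tar.lz4`); err != nil {
		// stat exited non-zero: not there yet, copy it over
		if err := r.Copy(local, "/preloaded.tar.lz4"); err != nil {
			return fmt.Errorf("transfer preload: %w", err)
		}
	}
	if err := r.Run("sudo tar -I lz4 -C /var -xf /preloaded.tar.lz4"); err != nil {
		return fmt.Errorf("extract preload: %w", err)
	}
	return r.Run("rm /preloaded.tar.lz4")
}

// echoRunner just prints the commands instead of dialing a guest.
type echoRunner struct{}

func (echoRunner) Run(cmd string) error { fmt.Println("ssh:", cmd); return nil }
func (echoRunner) Copy(local, remote string) error {
	fmt.Println("scp:", local, "->", remote)
	return nil
}

func main() {
	_ = ensurePreload(echoRunner{}, "preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4")
}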
	I0813 00:26:45.984256  718671 ssh_runner.go:149] Run: sudo systemctl daemon-reload
	I0813 00:26:46.134330  718671 ssh_runner.go:149] Run: sudo systemctl restart containerd
	I0813 00:26:46.184802  718671 ssh_runner.go:149] Run: sudo systemctl stop -f crio
	I0813 00:26:46.223245  718671 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service crio
	I0813 00:26:46.235951  718671 docker.go:153] disabling docker service ...
	I0813 00:26:46.236017  718671 ssh_runner.go:149] Run: sudo systemctl stop -f docker.socket
	I0813 00:26:46.247737  718671 ssh_runner.go:149] Run: sudo systemctl stop -f docker.service
	I0813 00:26:46.260454  718671 ssh_runner.go:149] Run: sudo systemctl disable docker.socket
	I0813 00:26:46.412590  718671 ssh_runner.go:149] Run: sudo systemctl mask docker.service
	I0813 00:26:46.553660  718671 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service docker
	I0813 00:26:46.567538  718671 ssh_runner.go:149] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	image-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0813 00:26:46.581717  718671 ssh_runner.go:149] Run: /bin/bash -c "sudo mkdir -p /etc/containerd && printf %s "cm9vdCA9ICIvdmFyL2xpYi9jb250YWluZXJkIgpzdGF0ZSA9ICIvcnVuL2NvbnRhaW5lcmQiCm9vbV9zY29yZSA9IDAKW2dycGNdCiAgYWRkcmVzcyA9ICIvcnVuL2NvbnRhaW5lcmQvY29udGFpbmVyZC5zb2NrIgogIHVpZCA9IDAKICBnaWQgPSAwCiAgbWF4X3JlY3ZfbWVzc2FnZV9zaXplID0gMTY3NzcyMTYKICBtYXhfc2VuZF9tZXNzYWdlX3NpemUgPSAxNjc3NzIxNgoKW2RlYnVnXQogIGFkZHJlc3MgPSAiIgogIHVpZCA9IDAKICBnaWQgPSAwCiAgbGV2ZWwgPSAiIgoKW21ldHJpY3NdCiAgYWRkcmVzcyA9ICIiCiAgZ3JwY19oaXN0b2dyYW0gPSBmYWxzZQoKW2Nncm91cF0KICBwYXRoID0gIiIKCltwbHVnaW5zXQogIFtwbHVnaW5zLmNncm91cHNdCiAgICBub19wcm9tZXRoZXVzID0gZmFsc2UKICBbcGx1Z2lucy5jcmldCiAgICBzdHJlYW1fc2VydmVyX2FkZHJlc3MgPSAiIgogICAgc3RyZWFtX3NlcnZlcl9wb3J0ID0gIjEwMDEwIgogICAgZW5hYmxlX3NlbGludXggPSBmYWxzZQogICAgc2FuZGJveF9pbWFnZSA9ICJrOHMuZ2NyLmlvL3BhdXNlOjMuNC4xIgogICAgc3RhdHNfY29sbGVjdF9wZXJpb2QgPSAxMAogICAgZW5hYmxlX3Rsc19zdHJlYW1pbmcgPSBmYWxzZQogICAgbWF4X2NvbnRhaW5lcl9sb2dfbGluZV9zaXplID0gMTYzODQKCglbcGx1Z2lucy4iaW8uY29udGFpbmVyZC5ncnBjLnYxLmNyaSJdCiAgICAgIFtwbHVnaW5zLiJpby5jb250YWluZXJkLmdycGMudjEuY3JpIi5jb250YWluZXJkXQogICAgICAgIFtwbHVnaW5zLiJpby5jb250YWluZXJkLmdycGMudjEuY3JpIi5jb250YWluZXJkLnJ1bnRpbWVzXQogICAgICAgICAgW3BsdWdpbnMuImlvLmNvbnRhaW5lcmQuZ3JwYy52MS5jcmkiLmNvbnRhaW5lcmQucnVudGltZXMucnVuY10KICAgICAgICAgICAgcnVudGltZV90eXBlID0gImlvLmNvbnRhaW5lcmQucnVuYy52MiIKICAgICAgICAgICAgW3BsdWdpbnMuImlvLmNvbnRhaW5lcmQuZ3JwYy52MS5jcmkiLmNvbnRhaW5lcmQucnVudGltZXMucnVuYy5vcHRpb25zXQogICAgICAgICAgICAgIFN5c3RlbWRDZ3JvdXAgPSBmYWxzZQoKICAgIFtwbHVnaW5zLmNyaS5jb250YWluZXJkXQogICAgICBzbmFwc2hvdHRlciA9ICJvdmVybGF5ZnMiCiAgICAgIFtwbHVnaW5zLmNyaS5jb250YWluZXJkLmRlZmF1bHRfcnVudGltZV0KICAgICAgICBydW50aW1lX3R5cGUgPSAiaW8uY29udGFpbmVyZC5ydW5jLnYyIgogICAgICBbcGx1Z2lucy5jcmkuY29udGFpbmVyZC51bnRydXN0ZWRfd29ya2xvYWRfcnVudGltZV0KICAgICAgICBydW50aW1lX3R5cGUgPSAiIgogICAgICAgIHJ1bnRpbWVfZW5naW5lID0gIiIKICAgICAgICBydW50aW1lX3Jvb3QgPSAiIgogICAgW3BsdWdpbnMuY3JpLmNuaV0KICAgICAgYmluX2RpciA9ICIvb3B0L2NuaS9iaW4iCiAgICAgIGNvbmZfZGlyID0gIi9ldGMvY25pL25ldC5kIgogICAgICBjb25mX3RlbXBsYXRlID0gIiIKICAgIFtwbHVnaW5zLmNyaS5yZWdpc3RyeV0KICAgICAgW3BsdWdpbnMuY3JpLnJlZ2lzdHJ5Lm1pcnJvcnNdCiAgICAgICAgW3BsdWdpbnMuY3JpLnJlZ2lzdHJ5Lm1pcnJvcnMuImRvY2tlci5pbyJdCiAgICAgICAgICBlbmRwb2ludCA9IFsiaHR0cHM6Ly9yZWdpc3RyeS0xLmRvY2tlci5pbyJdCiAgICAgICAgW3BsdWdpbnMuZGlmZi1zZXJ2aWNlXQogICAgZGVmYXVsdCA9IFsid2Fsa2luZyJdCiAgW3BsdWdpbnMuc2NoZWR1bGVyXQogICAgcGF1c2VfdGhyZXNob2xkID0gMC4wMgogICAgZGVsZXRpb25fdGhyZXNob2xkID0gMAogICAgbXV0YXRpb25fdGhyZXNob2xkID0gMTAwCiAgICBzY2hlZHVsZV9kZWxheSA9ICIwcyIKICAgIHN0YXJ0dXBfZGVsYXkgPSAiMTAwbXMiCg==" | base64 -d | sudo tee /etc/containerd/config.toml"
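The long printf argument above is minikube's containerd config.toml, base64-encoded so it survives shell quoting and decoded on the guest with base64 -d. A sketch of how such a write is composed; configExcerpt is the verified start of the decoded payload (the full file continues with the [debug], [metrics], and [plugins] sections shown in the log):

package main

import (
	"encoding/base64"
	"fmt"
)

// Decoding the base64 payload in the log yields a config.toml that starts:
const configExcerpt = `root = "/var/lib/containerd"
state = "/run/containerd"
oom_score = 0
[grpc]
  address = "/run/containerd/containerd.sock"
  uid = 0
  gid = 0
  max_recv_message_size = 16777216
  max_send_message_size = 16777216
`

func main() {
	// Encode locally, decode remotely: the file arrives byte-for-byte
	// intact regardless of the remote shell's quoting rules.
	enc := base64.StdEncoding.EncodeToString([]byte(configExcerpt))
	fmt.Printf("sudo mkdir -p /etc/containerd && printf %%s %q | base64 -d | sudo tee /etc/containerd/config.toml\n", enc)
}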
	I0813 00:26:46.595867  718671 ssh_runner.go:149] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0813 00:26:46.603534  718671 crio.go:128] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0813 00:26:46.603588  718671 ssh_runner.go:149] Run: sudo modprobe br_netfilter
	I0813 00:26:46.619470  718671 ssh_runner.go:149] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0813 00:26:46.626539  718671 ssh_runner.go:149] Run: sudo systemctl daemon-reload
	I0813 00:26:46.761509  718671 ssh_runner.go:149] Run: sudo systemctl restart containerd
	I0813 00:26:47.908883  718671 ssh_runner.go:189] Completed: sudo systemctl restart containerd: (1.147322061s)
	I0813 00:26:47.908932  718671 start.go:392] Will wait 60s for socket path /run/containerd/containerd.sock
	I0813 00:26:47.909002  718671 ssh_runner.go:149] Run: stat /run/containerd/containerd.sock
	I0813 00:26:47.917054  718671 retry.go:31] will retry after 1.104660288s: stat /run/containerd/containerd.sock: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot stat '/run/containerd/containerd.sock': No such file or directory
	I0813 00:26:49.022740  718671 ssh_runner.go:149] Run: stat /run/containerd/containerd.sock
	I0813 00:26:49.028767  718671 start.go:417] Will wait 60s for crictl version
	I0813 00:26:49.028835  718671 ssh_runner.go:149] Run: sudo crictl version
	I0813 00:26:49.101096  718671 start.go:426] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  v1.4.9
	RuntimeApiVersion:  v1alpha2
	I0813 00:26:49.101204  718671 ssh_runner.go:149] Run: containerd --version
	I0813 00:26:49.133299  718671 ssh_runner.go:149] Run: containerd --version
	I0813 00:26:49.172193  718671 out.go:177] * Preparing Kubernetes v1.21.3 on containerd 1.4.9 ...
	I0813 00:26:49.172256  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetIP
	I0813 00:26:49.178427  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:49.178841  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:26:49.178871  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:26:49.179194  718671 ssh_runner.go:149] Run: grep 192.168.160.1	host.minikube.internal$ /etc/hosts
	I0813 00:26:49.184947  718671 ssh_runner.go:149] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.160.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
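The one-liner above replaces a pinned /etc/hosts entry in place: grep -v drops any stale host.minikube.internal line, echo appends the fresh one, the result lands in a temp file, and sudo cp writes it back (a bare sudo echo >> /etc/hosts would fail, since the shell evaluates the redirect before sudo runs). A sketch that builds the same pipeline for any name/IP pair, to be run via /bin/bash -c (hostsUpdateCmd is illustrative):

package main

import "fmt"

// hostsUpdateCmd builds the drop-then-append pipeline from the log,
// parameterized over the entry being pinned. The $'\t' is bash ANSI-C
// quoting; the echo line carries a literal tab between IP and name.
func hostsUpdateCmd(ip, name string) string {
	return fmt.Sprintf(
		"{ grep -v $'\\t%s$' \"/etc/hosts\"; echo \"%s\t%s\"; } > /tmp/h.$$; sudo cp /tmp/h.$$ \"/etc/hosts\"",
		name, ip, name)
}

func main() {
	fmt.Println(hostsUpdateCmd("192.168.160.1", "host.minikube.internal"))
}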
	I0813 00:26:49.197649  718671 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0813 00:26:49.197722  718671 ssh_runner.go:149] Run: sudo crictl images --output json
	I0813 00:26:49.240250  718671 containerd.go:613] all images are preloaded for containerd runtime.
	I0813 00:26:49.240278  718671 containerd.go:517] Images already preloaded, skipping extraction
	I0813 00:26:49.240339  718671 ssh_runner.go:149] Run: sudo crictl images --output json
	I0813 00:26:49.281616  718671 containerd.go:613] all images are preloaded for containerd runtime.
	I0813 00:26:49.281645  718671 cache_images.go:74] Images are preloaded, skipping loading
	I0813 00:26:49.281727  718671 ssh_runner.go:149] Run: sudo crictl info
	I0813 00:26:49.323875  718671 cni.go:93] Creating CNI manager for "calico"
	I0813 00:26:49.323910  718671 kubeadm.go:87] Using pod CIDR: 10.244.0.0/16
	I0813 00:26:49.323929  718671 kubeadm.go:153] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.160.169 APIServerPort:8443 KubernetesVersion:v1.21.3 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:calico-20210813002105-679351 NodeName:calico-20210813002105-679351 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.160.169"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NoTaintMaster:true NodeIP:192.168.160.169 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[]}
	I0813 00:26:49.324109  718671 kubeadm.go:157] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.160.169
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: /run/containerd/containerd.sock
	  name: "calico-20210813002105-679351"
	  kubeletExtraArgs:
	    node-ip: 192.168.160.169
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.160.169"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	dns:
	  type: CoreDNS
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.21.3
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
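	The kubeadm config above is rendered from the options logged at kubeadm.go:153. A trimmed, hypothetical sketch of that rendering step using text/template follows; the template text and struct fields are illustrative stand-ins, not minikube's actual template:

```go
package main

import (
	"os"
	"text/template"
)

// A cut-down InitConfiguration template, patterned on the log output above.
const initCfg = `apiVersion: kubeadm.k8s.io/v1beta2
kind: InitConfiguration
localAPIEndpoint:
  advertiseAddress: {{.AdvertiseAddress}}
  bindPort: {{.APIServerPort}}
nodeRegistration:
  criSocket: {{.CRISocket}}
  name: "{{.NodeName}}"
  kubeletExtraArgs:
    node-ip: {{.NodeIP}}
  taints: []
`

func main() {
	// Values copied from the logged kubeadm options.
	params := struct {
		AdvertiseAddress, CRISocket, NodeName, NodeIP string
		APIServerPort                                 int
	}{
		AdvertiseAddress: "192.168.160.169",
		CRISocket:        "/run/containerd/containerd.sock",
		NodeName:         "calico-20210813002105-679351",
		NodeIP:           "192.168.160.169",
		APIServerPort:    8443,
	}
	// template.Must panics on a parse error, acceptable for a constant template.
	t := template.Must(template.New("kubeadm").Parse(initCfg))
	if err := t.Execute(os.Stdout, params); err != nil {
		panic(err)
	}
}
```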
	
	I0813 00:26:49.324226  718671 kubeadm.go:909] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.21.3/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --container-runtime=remote --container-runtime-endpoint=unix:///run/containerd/containerd.sock --hostname-override=calico-20210813002105-679351 --image-service-endpoint=unix:///run/containerd/containerd.sock --kubeconfig=/etc/kubernetes/kubelet.conf --network-plugin=cni --node-ip=192.168.160.169 --runtime-request-timeout=15m
	
	[Install]
	 config:
	{KubernetesVersion:v1.21.3 ClusterName:calico-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:calico NodeIP: NodePort:8443 NodeName:}
	I0813 00:26:49.324299  718671 ssh_runner.go:149] Run: sudo ls /var/lib/minikube/binaries/v1.21.3
	I0813 00:26:49.340269  718671 binaries.go:44] Found k8s binaries, skipping transfer
	I0813 00:26:49.340352  718671 ssh_runner.go:149] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0813 00:26:49.350947  718671 ssh_runner.go:316] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (545 bytes)
	I0813 00:26:49.367451  718671 ssh_runner.go:316] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0813 00:26:49.381870  718671 ssh_runner.go:316] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2087 bytes)
	I0813 00:26:49.398357  718671 ssh_runner.go:149] Run: grep 192.168.160.169	control-plane.minikube.internal$ /etc/hosts
	I0813 00:26:49.403940  718671 ssh_runner.go:149] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.160.169	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
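The bash one-liner above updates /etc/hosts idempotently: drop any stale line for the hostname, then append the current IP. The same rewrite in plain Go, as a runnable sketch against a scratch file; the IP and hostname are taken from the log, while the helper itself is illustrative:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// ensureHostEntry removes any line ending in "\t<host>" and appends
// "ip\thost", matching the grep -v / echo pipeline in the log.
func ensureHostEntry(path, ip, host string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var kept []string
	for _, line := range strings.Split(string(data), "\n") {
		if strings.HasSuffix(line, "\t"+host) {
			continue // drop the stale entry for this hostname
		}
		kept = append(kept, line)
	}
	out := strings.TrimRight(strings.Join(kept, "\n"), "\n")
	out += fmt.Sprintf("\n%s\t%s\n", ip, host)
	return os.WriteFile(path, []byte(out), 0644)
}

func main() {
	// Demo against a scratch file so the sketch runs without root.
	path := "/tmp/hosts.example"
	if err := os.WriteFile(path, []byte("127.0.0.1\tlocalhost\n"), 0644); err != nil {
		panic(err)
	}
	if err := ensureHostEntry(path, "192.168.160.169", "control-plane.minikube.internal"); err != nil {
		panic(err)
	}
	data, _ := os.ReadFile(path)
	fmt.Print(string(data))
}
```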
	I0813 00:26:49.416637  718671 certs.go:52] Setting up /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351 for IP: 192.168.160.169
	I0813 00:26:49.416701  718671 certs.go:179] skipping minikubeCA CA generation: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.key
	I0813 00:26:49.416720  718671 certs.go:179] skipping proxyClientCA CA generation: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/proxy-client-ca.key
	I0813 00:26:49.416780  718671 certs.go:294] generating minikube-user signed cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.key
	I0813 00:26:49.416787  718671 crypto.go:69] Generating cert /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.crt with IP's: []
	I0813 00:26:49.759432  718671 crypto.go:157] Writing cert to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.crt ...
	I0813 00:26:49.759472  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.crt: {Name:mkfb841c88ed6a4e8832d6061e2b09ba3d8053fe Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:26:49.759724  718671 crypto.go:165] Writing key to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.key ...
	I0813 00:26:49.759749  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/client.key: {Name:mk0ec85d271916b68c66689da6653669c3fc8b54 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:26:49.759877  718671 certs.go:294] generating minikube signed cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key.b7e78757
	I0813 00:26:49.759891  718671 crypto.go:69] Generating cert /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt.b7e78757 with IP's: [192.168.160.169 10.96.0.1 127.0.0.1 10.0.0.1]
	I0813 00:26:49.855965  718671 crypto.go:157] Writing cert to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt.b7e78757 ...
	I0813 00:26:49.856004  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt.b7e78757: {Name:mk0324a3690b2c52cabbde966fea421a64dbdafd Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:26:49.856206  718671 crypto.go:165] Writing key to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key.b7e78757 ...
	I0813 00:26:49.856232  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key.b7e78757: {Name:mkd56e815b0641bfebdbb516fd4ef093a458c88b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:26:49.856361  718671 certs.go:305] copying /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt.b7e78757 -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt
	I0813 00:26:49.856439  718671 certs.go:309] copying /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key.b7e78757 -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key
	I0813 00:26:49.856500  718671 certs.go:294] generating aggregator signed cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.key
	I0813 00:26:49.856511  718671 crypto.go:69] Generating cert /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.crt with IP's: []
	I0813 00:26:50.034280  718671 crypto.go:157] Writing cert to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.crt ...
	I0813 00:26:50.034328  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.crt: {Name:mkb8f7be2104d54cdd570ba67ab07ac5a0796dc2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:26:50.034572  718671 crypto.go:165] Writing key to /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.key ...
	I0813 00:26:50.034595  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.key: {Name:mk94105e56e84a711fd2c1fc08d8071d74b06a92 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
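The certs.go/crypto.go steps above generate CA-signed certificates whose SANs cover the node IP, the service VIP, and localhost. A self-contained sketch of the same idea with crypto/x509, using only the standard library; the CA here is freshly generated as a stand-in for the preexisting minikubeCA key pair:

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	// Throwaway CA, standing in for minikubeCA.
	caKey, err := rsa.GenerateKey(rand.Reader, 2048)
	must(err)
	caTmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().AddDate(10, 0, 0),
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature,
		BasicConstraintsValid: true,
	}
	caDER, err := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
	must(err)
	caCert, err := x509.ParseCertificate(caDER)
	must(err)

	// Leaf apiserver cert with the SAN IPs seen in the log above.
	leafKey, err := rsa.GenerateKey(rand.Reader, 2048)
	must(err)
	leafTmpl := &x509.Certificate{
		SerialNumber: big.NewInt(2),
		Subject:      pkix.Name{CommonName: "minikube"},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().AddDate(1, 0, 0),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		IPAddresses: []net.IP{
			net.ParseIP("192.168.160.169"), net.ParseIP("10.96.0.1"),
			net.ParseIP("127.0.0.1"), net.ParseIP("10.0.0.1"),
		},
	}
	leafDER, err := x509.CreateCertificate(rand.Reader, leafTmpl, caCert, &leafKey.PublicKey, caKey)
	must(err)
	must(pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: leafDER}))
}
```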
	I0813 00:26:50.034840  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/679351.pem (1338 bytes)
	W0813 00:26:50.034906  718671 certs.go:369] ignoring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/679351_empty.pem, impossibly tiny 0 bytes
	I0813 00:26:50.034924  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca-key.pem (1679 bytes)
	I0813 00:26:50.034963  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/ca.pem (1082 bytes)
	I0813 00:26:50.035007  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/cert.pem (1123 bytes)
	I0813 00:26:50.035043  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/key.pem (1675 bytes)
	I0813 00:26:50.035126  718671 certs.go:373] found cert: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/ssl/certs/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/ssl/certs/6793512.pem (1708 bytes)
	I0813 00:26:50.036643  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1399 bytes)
	I0813 00:26:50.058828  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0813 00:26:50.080768  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0813 00:26:50.101969  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/calico-20210813002105-679351/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0813 00:26:50.122742  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0813 00:26:50.141552  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0813 00:26:50.161227  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0813 00:26:50.184824  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0813 00:26:50.208814  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0813 00:26:50.231118  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/certs/679351.pem --> /usr/share/ca-certificates/679351.pem (1338 bytes)
	I0813 00:26:50.251840  718671 ssh_runner.go:316] scp /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/ssl/certs/6793512.pem --> /usr/share/ca-certificates/6793512.pem (1708 bytes)
	I0813 00:26:50.277666  718671 ssh_runner.go:316] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0813 00:26:50.291053  718671 ssh_runner.go:149] Run: openssl version
	I0813 00:26:50.300050  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/6793512.pem && ln -fs /usr/share/ca-certificates/6793512.pem /etc/ssl/certs/6793512.pem"
	I0813 00:26:50.309177  718671 ssh_runner.go:149] Run: ls -la /usr/share/ca-certificates/6793512.pem
	I0813 00:26:50.314800  718671 certs.go:416] hashing: -rw-r--r-- 1 root root 1708 Aug 12 23:48 /usr/share/ca-certificates/6793512.pem
	I0813 00:26:50.314865  718671 ssh_runner.go:149] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/6793512.pem
	I0813 00:26:50.322986  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/6793512.pem /etc/ssl/certs/3ec20f2e.0"
	I0813 00:26:50.334316  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0813 00:26:50.345002  718671 ssh_runner.go:149] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0813 00:26:50.350773  718671 certs.go:416] hashing: -rw-r--r-- 1 root root 1111 Aug 12 23:41 /usr/share/ca-certificates/minikubeCA.pem
	I0813 00:26:50.350836  718671 ssh_runner.go:149] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0813 00:26:50.358989  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0813 00:26:50.368512  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/679351.pem && ln -fs /usr/share/ca-certificates/679351.pem /etc/ssl/certs/679351.pem"
	I0813 00:26:50.378219  718671 ssh_runner.go:149] Run: ls -la /usr/share/ca-certificates/679351.pem
	I0813 00:26:50.383686  718671 certs.go:416] hashing: -rw-r--r-- 1 root root 1338 Aug 12 23:48 /usr/share/ca-certificates/679351.pem
	I0813 00:26:50.383748  718671 ssh_runner.go:149] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/679351.pem
	I0813 00:26:50.390559  718671 ssh_runner.go:149] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/679351.pem /etc/ssl/certs/51391683.0"
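The openssl/ln sequence above installs each PEM under /etc/ssl/certs as <subject-hash>.0, which is how OpenSSL locates trust anchors. A small Go sketch of the same two steps, shelling out to openssl; it assumes openssl is on PATH and needs write access to the target directory (paths are copied from the log):

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)

// linkByHash mirrors the shell steps above: ask openssl for the
// certificate's subject hash, then symlink <hash>.0 -> cert in certsDir.
func linkByHash(certPath, certsDir string) error {
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
	if err != nil {
		return err
	}
	hash := strings.TrimSpace(string(out))
	link := filepath.Join(certsDir, hash+".0")
	os.Remove(link) // ln -fs semantics: replace any existing link
	return os.Symlink(certPath, link)
}

func main() {
	if err := linkByHash("/usr/share/ca-certificates/minikubeCA.pem", "/etc/ssl/certs"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```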
	I0813 00:26:50.400583  718671 kubeadm.go:390] StartCluster: {Name:calico-20210813002105-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:2048 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:calico-20210813002105-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:calico NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.160.169 Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:5m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0813 00:26:50.400731  718671 cri.go:41] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0813 00:26:50.400823  718671 ssh_runner.go:149] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0813 00:26:50.440775  718671 cri.go:76] found id: ""
	I0813 00:26:50.440861  718671 ssh_runner.go:149] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0813 00:26:50.449120  718671 ssh_runner.go:149] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0813 00:26:50.456587  718671 ssh_runner.go:149] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0813 00:26:50.466719  718671 kubeadm.go:151] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0813 00:26:50.466767  718671 ssh_runner.go:240] Start: /bin/bash -c "sudo env PATH=/var/lib/minikube/binaries/v1.21.3:$PATH kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,Mem"
	I0813 00:26:51.289572  718671 out.go:204]   - Generating certificates and keys ...
	I0813 00:26:54.263153  718671 out.go:204]   - Booting up control plane ...
	I0813 00:27:10.875741  718671 out.go:204]   - Configuring RBAC rules ...
	I0813 00:27:11.563836  718671 cni.go:93] Creating CNI manager for "calico"
	I0813 00:27:11.565750  718671 out.go:177] * Configuring Calico (Container Networking Interface) ...
	I0813 00:27:11.566012  718671 cni.go:187] applying CNI manifest using /var/lib/minikube/binaries/v1.21.3/kubectl ...
	I0813 00:27:11.566037  718671 ssh_runner.go:316] scp memory --> /var/tmp/minikube/cni.yaml (202053 bytes)
	I0813 00:27:11.594152  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0813 00:27:14.919267  718671 ssh_runner.go:189] Completed: sudo /var/lib/minikube/binaries/v1.21.3/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml: (3.325065914s)
	I0813 00:27:14.919327  718671 ssh_runner.go:149] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0813 00:27:14.919452  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:14.919520  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl label nodes minikube.k8s.io/version=v1.22.0 minikube.k8s.io/commit=dc1c3ca26e9449ce488a773126b8450402c94a19 minikube.k8s.io/name=calico-20210813002105-679351 minikube.k8s.io/updated_at=2021_08_13T00_27_14_0700 --all --overwrite --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:15.477343  718671 ops.go:34] apiserver oom_adj: -16
	I0813 00:27:15.477443  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:16.152510  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:16.652371  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:17.153224  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:17.652587  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:18.152978  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:18.652806  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:19.152397  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:19.652898  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:20.153268  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:20.653037  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:21.153150  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:21.653265  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:22.152416  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:24.235966  718671 ssh_runner.go:189] Completed: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig: (2.083500115s)
	I0813 00:27:24.652492  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:28.232305  718671 ssh_runner.go:189] Completed: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig: (3.57976999s)
	I0813 00:27:28.652385  718671 ssh_runner.go:149] Run: sudo /var/lib/minikube/binaries/v1.21.3/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0813 00:27:29.125203  718671 kubeadm.go:985] duration metric: took 14.20579902s to wait for elevateKubeSystemPrivileges.
	I0813 00:27:29.125237  718671 kubeadm.go:392] StartCluster complete in 38.724664814s
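The burst of `kubectl get sa default` runs above is a simple readiness poll: retry roughly every 500ms until the default service account exists or a deadline passes. A generic sketch of that pattern, where the probe command is copied from the log and pollUntil is an illustrative helper, not minikube's API:

```go
package main

import (
	"fmt"
	"os/exec"
	"time"
)

// pollUntil runs probe every 500ms until it succeeds or the deadline passes.
func pollUntil(deadline time.Duration, probe func() error) error {
	stop := time.Now().Add(deadline)
	for {
		if err := probe(); err == nil {
			return nil
		}
		if time.Now().After(stop) {
			return fmt.Errorf("timed out after %s", deadline)
		}
		time.Sleep(500 * time.Millisecond)
	}
}

func main() {
	err := pollUntil(2*time.Minute, func() error {
		// Probe copied from the log: succeeds once the SA exists.
		return exec.Command("sudo", "/var/lib/minikube/binaries/v1.21.3/kubectl",
			"get", "sa", "default", "--kubeconfig=/var/lib/minikube/kubeconfig").Run()
	})
	fmt.Println("default service account ready:", err == nil)
}
```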
	I0813 00:27:29.125262  718671 settings.go:142] acquiring lock: {Name:mk513992707531c891d59a503efeac355a20c006 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:27:29.125373  718671 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:27:29.127190  718671 lock.go:36] WriteFile acquiring /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig: {Name:mk4539f4325bfd6eb26b6ddb5c7e1835c2548cd2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0813 00:27:29.775420  718671 kapi.go:244] deployment "coredns" in namespace "kube-system" and context "calico-20210813002105-679351" rescaled to 1
	I0813 00:27:29.775477  718671 start.go:226] Will wait 5m0s for node &{Name: IP:192.168.160.169 Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}
	I0813 00:27:29.777272  718671 out.go:177] * Verifying Kubernetes components...
	I0813 00:27:29.777334  718671 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0813 00:27:29.775527  718671 ssh_runner.go:149] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.21.3/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0813 00:27:29.775545  718671 addons.go:342] enableAddons start: toEnable=map[], additional=[]
	I0813 00:27:29.777455  718671 addons.go:59] Setting storage-provisioner=true in profile "calico-20210813002105-679351"
	I0813 00:27:29.777464  718671 addons.go:59] Setting default-storageclass=true in profile "calico-20210813002105-679351"
	I0813 00:27:29.777481  718671 addons.go:135] Setting addon storage-provisioner=true in "calico-20210813002105-679351"
	I0813 00:27:29.777482  718671 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "calico-20210813002105-679351"
	W0813 00:27:29.777489  718671 addons.go:147] addon storage-provisioner should already be in state true
	I0813 00:27:29.777553  718671 host.go:66] Checking if "calico-20210813002105-679351" exists ...
	I0813 00:27:29.777989  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:27:29.778038  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:27:29.778050  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:27:29.778090  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:27:29.795741  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:35441
	I0813 00:27:29.796032  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:39415
	I0813 00:27:29.796242  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:27:29.796426  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:27:29.796739  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:27:29.796763  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:27:29.796953  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:27:29.796975  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:27:29.797167  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:27:29.797315  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:27:29.797408  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetState
	I0813 00:27:29.797981  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:27:29.798031  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:27:29.809564  718671 addons.go:135] Setting addon default-storageclass=true in "calico-20210813002105-679351"
	W0813 00:27:29.809791  718671 addons.go:147] addon default-storageclass should already be in state true
	I0813 00:27:29.809836  718671 host.go:66] Checking if "calico-20210813002105-679351" exists ...
	I0813 00:27:29.810259  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:27:29.810297  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:27:29.811735  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:42873
	I0813 00:27:29.812152  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:27:29.812618  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:27:29.812642  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:27:29.812995  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:27:29.813169  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetState
	I0813 00:27:29.816591  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:27:29.818809  718671 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0813 00:27:29.818927  718671 addons.go:275] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0813 00:27:29.818946  718671 ssh_runner.go:316] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0813 00:27:29.818970  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:27:29.822862  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:39959
	I0813 00:27:29.823290  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:27:29.823808  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:27:29.823848  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:27:29.824235  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:27:29.824858  718671 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:27:29.824906  718671 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:27:29.825377  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:27:29.825869  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:27:29.825896  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:27:29.826126  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:27:29.826280  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:27:29.826462  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:27:29.826628  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:27:29.838954  718671 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:37141
	I0813 00:27:29.839414  718671 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:27:29.839894  718671 main.go:130] libmachine: Using API Version  1
	I0813 00:27:29.839923  718671 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:27:29.840333  718671 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:27:29.840536  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetState
	I0813 00:27:29.843899  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .DriverName
	I0813 00:27:29.844142  718671 addons.go:275] installing /etc/kubernetes/addons/storageclass.yaml
	I0813 00:27:29.844166  718671 ssh_runner.go:316] scp memory --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0813 00:27:29.844192  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHHostname
	I0813 00:27:29.850154  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:27:29.850581  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:51:22:1f", ip: ""} in network mk-calico-20210813002105-679351: {Iface:virbr14 ExpiryTime:2021-08-13 01:26:22 +0000 UTC Type:0 Mac:52:54:00:51:22:1f Iaid: IPaddr:192.168.160.169 Prefix:24 Hostname:calico-20210813002105-679351 Clientid:01:52:54:00:51:22:1f}
	I0813 00:27:29.850613  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | domain calico-20210813002105-679351 has defined IP address 192.168.160.169 and MAC address 52:54:00:51:22:1f in network mk-calico-20210813002105-679351
	I0813 00:27:29.850779  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHPort
	I0813 00:27:29.850948  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHKeyPath
	I0813 00:27:29.851091  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .GetSSHUsername
	I0813 00:27:29.851250  718671 sshutil.go:53] new ssh client: &{IP:192.168.160.169 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/calico-20210813002105-679351/id_rsa Username:docker}
	I0813 00:27:30.028273  718671 ssh_runner.go:149] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.21.3/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0813 00:27:30.033656  718671 ssh_runner.go:149] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.21.3/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0813 00:27:30.058136  718671 ssh_runner.go:149] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.21.3/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.160.1 host.minikube.internal\n           fallthrough\n        }' | sudo /var/lib/minikube/binaries/v1.21.3/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0813 00:27:30.060581  718671 node_ready.go:35] waiting up to 5m0s for node "calico-20210813002105-679351" to be "Ready" ...
	I0813 00:27:30.064951  718671 node_ready.go:49] node "calico-20210813002105-679351" has status "Ready":"True"
	I0813 00:27:30.064974  718671 node_ready.go:38] duration metric: took 4.361144ms waiting for node "calico-20210813002105-679351" to be "Ready" ...
	I0813 00:27:30.064986  718671 pod_ready.go:35] extra waiting up to 5m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0813 00:27:30.079905  718671 pod_ready.go:78] waiting up to 5m0s for pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace to be "Ready" ...
	I0813 00:27:32.100759  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:32.996693  718671 ssh_runner.go:189] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.21.3/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (2.962998437s)
	I0813 00:27:32.996759  718671 main.go:130] libmachine: Making call to close driver server
	I0813 00:27:32.996773  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Close
	I0813 00:27:32.997141  718671 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:27:32.997163  718671 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:27:32.997175  718671 main.go:130] libmachine: Making call to close driver server
	I0813 00:27:32.997184  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Close
	I0813 00:27:32.997456  718671 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:27:32.997481  718671 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:27:32.997495  718671 main.go:130] libmachine: Making call to close driver server
	I0813 00:27:32.997498  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Closing plugin on server side
	I0813 00:27:32.997504  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Close
	I0813 00:27:32.997797  718671 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:27:32.997814  718671 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:27:33.002898  718671 ssh_runner.go:189] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.21.3/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (2.972760156s)
	I0813 00:27:33.002937  718671 main.go:130] libmachine: Making call to close driver server
	I0813 00:27:33.002953  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Close
	I0813 00:27:33.003298  718671 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:27:33.003337  718671 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:27:33.003349  718671 main.go:130] libmachine: Making call to close driver server
	I0813 00:27:33.003369  718671 main.go:130] libmachine: (calico-20210813002105-679351) Calling .Close
	I0813 00:27:33.004370  718671 main.go:130] libmachine: (calico-20210813002105-679351) DBG | Closing plugin on server side
	I0813 00:27:33.004468  718671 main.go:130] libmachine: Successfully made call to close driver server
	I0813 00:27:33.004516  718671 main.go:130] libmachine: Making call to close connection to plugin binary
	I0813 00:27:33.006560  718671 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0813 00:27:33.006592  718671 addons.go:344] enableAddons completed in 3.231051443s
	I0813 00:27:33.126565  718671 ssh_runner.go:189] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.21.3/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.160.1 host.minikube.internal\n           fallthrough\n        }' | sudo /var/lib/minikube/binaries/v1.21.3/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (3.06838163s)
	I0813 00:27:33.126591  718671 start.go:736] {"host.minikube.internal": 192.168.160.1} host record injected into CoreDNS
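The sed pipeline completed above edits the CoreDNS Corefile in place, inserting a hosts block for host.minikube.internal immediately before the forward directive. The same edit expressed in Go, run against a trimmed sample Corefile; injectHostRecord is an illustrative name:

```go
package main

import (
	"fmt"
	"strings"
)

// injectHostRecord inserts a CoreDNS `hosts` stanza just before the
// `forward . /etc/resolv.conf` line, matching the sed command in the log.
func injectHostRecord(corefile, hostIP string) string {
	block := fmt.Sprintf("        hosts {\n           %s host.minikube.internal\n           fallthrough\n        }\n", hostIP)
	var b strings.Builder
	for _, line := range strings.SplitAfter(corefile, "\n") {
		if strings.HasPrefix(strings.TrimSpace(line), "forward . /etc/resolv.conf") {
			b.WriteString(block)
		}
		b.WriteString(line)
	}
	return b.String()
}

func main() {
	// Trimmed sample Corefile for demonstration.
	corefile := ".:53 {\n        errors\n        forward . /etc/resolv.conf {\n           max_concurrent 1000\n        }\n        cache 30\n}\n"
	fmt.Print(injectHostRecord(corefile, "192.168.160.1"))
}
```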
	I0813 00:27:34.113660  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:36.603389  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:38.604456  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:41.101551  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:44.505624  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:51.103805  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:53.104560  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:55.601574  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:27:58.100999  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:00.101297  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:02.605899  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:05.100568  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:07.104275  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:09.601265  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:11.601664  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:13.602866  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:15.607598  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:18.100035  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:20.101316  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:22.604051  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:25.101883  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:27.610918  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:29.610974  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:32.102760  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:34.103537  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:36.600985  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:38.603230  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:40.606217  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:43.104641  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:45.600883  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:47.609062  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:50.101237  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:52.105879  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:54.603789  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:57.102435  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:28:59.600057  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:01.603268  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:04.100886  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:06.102549  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:08.103744  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:10.602417  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:12.605743  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:15.101794  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:18.694772  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:21.102798  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:24.960308  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:27.099240  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:29.109297  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:31.601107  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:33.602827  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:35.683147  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:38.104792  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:44.788759  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:47.103239  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:49.560742  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:51.606916  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:53.609579  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:56.101483  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:29:58.102902  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:00.602930  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:02.607753  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:09.877085  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:12.103464  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:14.602058  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:16.605042  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:19.130878  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:21.604086  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:24.100728  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:26.103614  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:28.106122  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:30.609574  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:33.102541  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:35.107297  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:37.609746  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:40.101813  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:42.102462  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:44.601255  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:46.601300  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:48.603744  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:51.099867  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:53.100551  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:55.101931  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:57.599998  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:30:59.601273  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:01.602336  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:04.102470  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:06.601396  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:08.603421  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:11.103455  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:13.599400  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:15.603727  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:17.604469  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:20.099963  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:22.101757  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:24.600272  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:26.610505  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:29.108939  718671 pod_ready.go:102] pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:30.138125  718671 pod_ready.go:81] duration metric: took 4m0.058174123s waiting for pod "calico-kube-controllers-85ff9ff759-26vtz" in "kube-system" namespace to be "Ready" ...
	E0813 00:31:30.138153  718671 pod_ready.go:66] WaitExtra: waitPodCondition: timed out waiting for the condition
	I0813 00:31:30.138166  718671 pod_ready.go:78] waiting up to 5m0s for pod "calico-node-g4lp2" in "kube-system" namespace to be "Ready" ...
	I0813 00:31:32.218072  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:34.705832  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:40.598531  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:43.325185  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:45.710202  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:48.203244  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:50.206857  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:52.704079  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:54.709255  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:57.202428  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:31:59.207214  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:01.661946  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:03.705260  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:06.203815  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:08.208287  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:10.713771  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:13.202058  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:15.202093  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:17.206841  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:19.701660  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:21.704417  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:23.707685  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:26.203444  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:28.702110  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:30.704092  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:32.712879  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:35.203993  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:37.702392  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:39.703105  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:42.203270  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:44.703692  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:47.205264  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:49.213559  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:51.701370  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:53.712730  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:56.176348  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:32:58.205982  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:00.210958  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:02.705245  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:04.705817  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:07.203171  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:09.204359  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:11.704155  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:13.706219  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:16.202913  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:18.706668  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:21.202656  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:23.702476  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:25.706518  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:28.201266  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:30.202786  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:32.204247  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:34.701529  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:36.708804  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:39.202054  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:41.702144  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:43.702312  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:45.704456  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:48.202038  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:50.706054  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:52.709376  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:54.711473  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:57.204138  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:33:59.704240  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:02.209732  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:04.704223  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:06.707604  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:08.710383  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:10.784924  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:13.975939  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:18.407604  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:20.701484  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:22.701995  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:24.712107  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:27.203798  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:29.702329  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:31.706456  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:34.201475  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:36.203405  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:38.700778  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:40.700918  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:42.702762  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:45.205051  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:47.703840  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:50.203111  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:52.701567  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:54.703751  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:57.202881  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:34:59.706552  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:02.204605  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:04.204810  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:06.206236  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:08.704313  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:10.705278  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:12.705914  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:19.598239  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:21.703509  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:24.203702  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:26.705534  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:29.203015  718671 pod_ready.go:102] pod "calico-node-g4lp2" in "kube-system" namespace has status "Ready":"False"
	I0813 00:35:30.207597  718671 pod_ready.go:81] duration metric: took 4m0.06941404s waiting for pod "calico-node-g4lp2" in "kube-system" namespace to be "Ready" ...
	E0813 00:35:30.207623  718671 pod_ready.go:66] WaitExtra: waitPodCondition: timed out waiting for the condition
	I0813 00:35:30.207644  718671 pod_ready.go:38] duration metric: took 8m0.142645212s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0813 00:35:30.209638  718671 out.go:177] 
	W0813 00:35:30.209763  718671 out.go:242] X Exiting due to GUEST_START: wait 5m0s for node: extra waiting: timed out waiting 5m0s for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready"
	W0813 00:35:30.209778  718671 out.go:242] * 
	[warning]: invalid value provided to Color, using default
	W0813 00:35:30.211946  718671 out.go:242] ╭─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                                                             │
	│    * If the above advice does not help, please let us know:                                                                                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                                                               │
	│                                                                                                                                                             │
	│    * Please attach the following file to the GitHub issue:                                                                                                  │
	│    * - /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/logs/lastStart.txt    │
	│                                                                                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	I0813 00:35:30.213434  718671 out.go:177] 

                                                
                                                
** /stderr **
net_test.go:100: failed start: exit status 80
--- FAIL: TestNetworkPlugins/group/calico/Start (580.22s)
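For context on the long run of pod_ready lines above: minikube polls each system pod's Ready condition roughly every two seconds, and in this run each calico pod was cut off after 4m0s (despite the advertised 5m0s per-pod wait) before the start aborted with GUEST_START and exit status 80. Below is a minimal standalone sketch of such a readiness poll using client-go; it is not minikube's actual pod_ready.go, and the namespace and pod name are simply taken from the failing run.

	package main
	
	import (
		"context"
		"fmt"
		"time"
	
		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/apimachinery/pkg/util/wait"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)
	
	// isPodReady reports whether the pod's PodReady condition is True,
	// which is the condition the test log above is waiting on.
	func isPodReady(pod *corev1.Pod) bool {
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}
	
	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
		if err != nil {
			panic(err)
		}
		cs, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		// Poll every 2s with a 4m budget, mirroring the cadence and cutoff seen above.
		err = wait.PollImmediate(2*time.Second, 4*time.Minute, func() (bool, error) {
			pod, err := cs.CoreV1().Pods("kube-system").Get(context.TODO(), "calico-node-g4lp2", metav1.GetOptions{})
			if err != nil {
				return false, nil // treat transient API errors as "not ready yet" and keep polling
			}
			return isPodReady(pod), nil
		})
		if err != nil {
			fmt.Println("timed out waiting for the condition:", err)
		}
	}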

                                                
                                    

Test pass (233/263)

Order  Passed test  Duration (s)
3 TestDownloadOnly/v1.14.0/json-events 9.22
4 TestDownloadOnly/v1.14.0/preload-exists 0
8 TestDownloadOnly/v1.14.0/LogsDuration 0.07
10 TestDownloadOnly/v1.21.3/json-events 7.71
11 TestDownloadOnly/v1.21.3/preload-exists 0
15 TestDownloadOnly/v1.21.3/LogsDuration 0.07
17 TestDownloadOnly/v1.22.0-rc.0/json-events 8.69
18 TestDownloadOnly/v1.22.0-rc.0/preload-exists 0
22 TestDownloadOnly/v1.22.0-rc.0/LogsDuration 0.07
23 TestDownloadOnly/DeleteAll 0.24
24 TestDownloadOnly/DeleteAlwaysSucceeds 0.23
26 TestOffline 140.3
29 TestAddons/parallel/Registry 21.86
30 TestAddons/parallel/Ingress 47.21
31 TestAddons/parallel/MetricsServer 5.73
32 TestAddons/parallel/HelmTiller 27.1
33 TestAddons/parallel/Olm 51.8
34 TestAddons/parallel/CSI 87.28
35 TestAddons/parallel/GCPAuth 46
36 TestCertOptions 80.03
38 TestForceSystemdFlag 92.05
39 TestForceSystemdEnv 73.96
40 TestKVMDriverInstallOrUpdate 2.42
44 TestErrorSpam/setup 60.33
45 TestErrorSpam/start 0.43
46 TestErrorSpam/status 0.74
47 TestErrorSpam/pause 3.68
48 TestErrorSpam/unpause 1.58
49 TestErrorSpam/stop 6.26
52 TestFunctional/serial/CopySyncFile 0
53 TestFunctional/serial/StartWithProxy 79.47
54 TestFunctional/serial/AuditLog 0
55 TestFunctional/serial/SoftStart 29.08
56 TestFunctional/serial/KubeContext 0.05
57 TestFunctional/serial/KubectlGetPods 0.21
60 TestFunctional/serial/CacheCmd/cache/add_remote 8.11
61 TestFunctional/serial/CacheCmd/cache/add_local 1.77
62 TestFunctional/serial/CacheCmd/cache/delete_k8s.gcr.io/pause:3.3 0.06
63 TestFunctional/serial/CacheCmd/cache/list 0.05
64 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 0.23
65 TestFunctional/serial/CacheCmd/cache/cache_reload 2.52
66 TestFunctional/serial/CacheCmd/cache/delete 0.11
67 TestFunctional/serial/MinikubeKubectlCmd 0.12
68 TestFunctional/serial/MinikubeKubectlCmdDirectly 0.11
69 TestFunctional/serial/ExtraConfig 37.57
70 TestFunctional/serial/ComponentHealth 0.07
71 TestFunctional/serial/LogsCmd 1.41
72 TestFunctional/serial/LogsFileCmd 1.37
74 TestFunctional/parallel/ConfigCmd 0.39
75 TestFunctional/parallel/DashboardCmd 4.32
76 TestFunctional/parallel/DryRun 0.34
77 TestFunctional/parallel/InternationalLanguage 0.18
78 TestFunctional/parallel/StatusCmd 0.93
81 TestFunctional/parallel/ServiceCmd 15.72
82 TestFunctional/parallel/AddonsCmd 0.18
83 TestFunctional/parallel/PersistentVolumeClaim 48.29
85 TestFunctional/parallel/SSHCmd 0.52
86 TestFunctional/parallel/CpCmd 0.51
87 TestFunctional/parallel/MySQL 25.9
88 TestFunctional/parallel/FileSync 0.26
89 TestFunctional/parallel/CertSync 1.77
93 TestFunctional/parallel/NodeLabels 0.07
94 TestFunctional/parallel/LoadImage 3.07
95 TestFunctional/parallel/RemoveImage 3.5
96 TestFunctional/parallel/LoadImageFromFile 1.83
97 TestFunctional/parallel/BuildImage 4.29
98 TestFunctional/parallel/ListImages 0.26
99 TestFunctional/parallel/NonActiveRuntimeDisabled 0.5
101 TestFunctional/parallel/UpdateContextCmd/no_changes 0.11
102 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 0.1
103 TestFunctional/parallel/UpdateContextCmd/no_clusters 0.1
104 TestFunctional/parallel/Version/short 0.06
105 TestFunctional/parallel/Version/components 1.18
107 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0.01
109 TestFunctional/parallel/ProfileCmd/profile_not_create 0.42
110 TestFunctional/parallel/ProfileCmd/profile_list 0.33
111 TestFunctional/parallel/ProfileCmd/profile_json_output 0.33
112 TestFunctional/parallel/MountCmd/any-port 6.02
113 TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP 0.07
114 TestFunctional/parallel/TunnelCmd/serial/AccessDirect 0.01
118 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
119 TestFunctional/parallel/MountCmd/specific-port 2.27
120 TestFunctional/delete_busybox_image 0.09
121 TestFunctional/delete_my-image_image 0.04
122 TestFunctional/delete_minikube_cached_images 0.04
126 TestJSONOutput/start/Audit 0
128 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
129 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
131 TestJSONOutput/pause/Audit 0
133 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
134 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
136 TestJSONOutput/unpause/Audit 0
138 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
139 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
141 TestJSONOutput/stop/Audit 0
143 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
144 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
145 TestErrorJSONOutput 0.32
148 TestMainNoArgs 0.05
151 TestMultiNode/serial/FreshStart2Nodes 148.62
152 TestMultiNode/serial/DeployApp2Nodes 5.65
153 TestMultiNode/serial/PingHostFrom2Pods 1.06
154 TestMultiNode/serial/AddNode 58.15
155 TestMultiNode/serial/ProfileList 0.24
156 TestMultiNode/serial/CopyFile 1.79
157 TestMultiNode/serial/StopNode 2.95
158 TestMultiNode/serial/StartAfterStop 72.14
159 TestMultiNode/serial/RestartKeepsNodes 488.29
160 TestMultiNode/serial/DeleteNode 2.15
161 TestMultiNode/serial/StopMultiNode 184.41
162 TestMultiNode/serial/RestartMultiNode 202.45
163 TestMultiNode/serial/ValidateNameConflict 72.25
169 TestDebPackageInstall/install_amd64_debian:sid/minikube 0
170 TestDebPackageInstall/install_amd64_debian:sid/kvm2-driver 11.32
172 TestDebPackageInstall/install_amd64_debian:latest/minikube 0
173 TestDebPackageInstall/install_amd64_debian:latest/kvm2-driver 10.48
175 TestDebPackageInstall/install_amd64_debian:10/minikube 0
176 TestDebPackageInstall/install_amd64_debian:10/kvm2-driver 9.94
178 TestDebPackageInstall/install_amd64_debian:9/minikube 0
179 TestDebPackageInstall/install_amd64_debian:9/kvm2-driver 8.68
181 TestDebPackageInstall/install_amd64_ubuntu:latest/minikube 0
182 TestDebPackageInstall/install_amd64_ubuntu:latest/kvm2-driver 14.85
184 TestDebPackageInstall/install_amd64_ubuntu:20.10/minikube 0
185 TestDebPackageInstall/install_amd64_ubuntu:20.10/kvm2-driver 14.96
187 TestDebPackageInstall/install_amd64_ubuntu:20.04/minikube 0
188 TestDebPackageInstall/install_amd64_ubuntu:20.04/kvm2-driver 14.35
190 TestDebPackageInstall/install_amd64_ubuntu:18.04/minikube 0
191 TestDebPackageInstall/install_amd64_ubuntu:18.04/kvm2-driver 13.76
192 TestPreload 147.51
194 TestScheduledStopUnix 90.87
198 TestRunningBinaryUpgrade 138.53
200 TestKubernetesUpgrade 258.98
203 TestPause/serial/Start 178.13
211 TestNetworkPlugins/group/false 0.42
215 TestPause/serial/SecondStartNoReconfiguration 70.16
216 TestStoppedBinaryUpgrade/MinikubeLogs 1.53
224 TestNetworkPlugins/group/auto/Start 109.98
225 TestPause/serial/Pause 0.92
226 TestPause/serial/VerifyStatus 0.33
227 TestPause/serial/Unpause 3.06
229 TestPause/serial/DeletePaused 1.2
230 TestPause/serial/VerifyDeletedResources 20.24
231 TestNetworkPlugins/group/kindnet/Start 112.46
232 TestNetworkPlugins/group/auto/KubeletFlags 0.23
233 TestNetworkPlugins/group/auto/NetCatPod 11.62
234 TestNetworkPlugins/group/auto/DNS 0.35
235 TestNetworkPlugins/group/auto/Localhost 0.3
236 TestNetworkPlugins/group/auto/HairPin 0.25
237 TestNetworkPlugins/group/cilium/Start 159.66
239 TestNetworkPlugins/group/kindnet/ControllerPod 5.04
240 TestNetworkPlugins/group/kindnet/KubeletFlags 0.26
241 TestNetworkPlugins/group/kindnet/NetCatPod 10.96
242 TestNetworkPlugins/group/kindnet/DNS 3.06
243 TestNetworkPlugins/group/kindnet/Localhost 0.28
244 TestNetworkPlugins/group/kindnet/HairPin 0.26
245 TestNetworkPlugins/group/custom-weave/Start 121.97
246 TestNetworkPlugins/group/flannel/Start 126.9
247 TestNetworkPlugins/group/cilium/ControllerPod 5.03
248 TestNetworkPlugins/group/cilium/KubeletFlags 0.22
249 TestNetworkPlugins/group/cilium/NetCatPod 11.73
250 TestNetworkPlugins/group/cilium/DNS 0.36
251 TestNetworkPlugins/group/cilium/Localhost 0.24
252 TestNetworkPlugins/group/cilium/HairPin 0.25
253 TestNetworkPlugins/group/bridge/Start 125.42
254 TestNetworkPlugins/group/custom-weave/KubeletFlags 0.22
255 TestNetworkPlugins/group/custom-weave/NetCatPod 11.42
256 TestNetworkPlugins/group/enable-default-cni/Start 129.53
257 TestNetworkPlugins/group/flannel/ControllerPod 5.03
258 TestNetworkPlugins/group/flannel/KubeletFlags 0.27
259 TestNetworkPlugins/group/flannel/NetCatPod 13.51
260 TestNetworkPlugins/group/flannel/DNS 0.28
261 TestNetworkPlugins/group/flannel/Localhost 0.21
262 TestNetworkPlugins/group/flannel/HairPin 0.21
264 TestStartStop/group/old-k8s-version/serial/FirstStart 152.09
265 TestNetworkPlugins/group/bridge/KubeletFlags 0.22
266 TestNetworkPlugins/group/bridge/NetCatPod 9.6
267 TestNetworkPlugins/group/bridge/DNS 0.25
268 TestNetworkPlugins/group/bridge/Localhost 0.19
269 TestNetworkPlugins/group/bridge/HairPin 0.21
271 TestStartStop/group/embed-certs/serial/FirstStart 117.14
272 TestNetworkPlugins/group/enable-default-cni/KubeletFlags 0.24
273 TestNetworkPlugins/group/enable-default-cni/NetCatPod 11.59
274 TestNetworkPlugins/group/enable-default-cni/DNS 0.24
275 TestNetworkPlugins/group/enable-default-cni/Localhost 0.19
276 TestNetworkPlugins/group/enable-default-cni/HairPin 0.19
278 TestStartStop/group/no-preload/serial/FirstStart 142.96
279 TestStartStop/group/old-k8s-version/serial/DeployApp 10.82
280 TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive 1.31
281 TestStartStop/group/old-k8s-version/serial/Stop 92.55
282 TestStartStop/group/embed-certs/serial/DeployApp 8.62
283 TestStartStop/group/embed-certs/serial/EnableAddonWhileActive 1.04
284 TestStartStop/group/embed-certs/serial/Stop 92.64
285 TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop 0.17
286 TestStartStop/group/old-k8s-version/serial/SecondStart 540.93
287 TestStartStop/group/no-preload/serial/DeployApp 9.65
288 TestStartStop/group/no-preload/serial/EnableAddonWhileActive 1.18
289 TestStartStop/group/no-preload/serial/Stop 92.66
290 TestStartStop/group/embed-certs/serial/EnableAddonAfterStop 0.16
291 TestStartStop/group/embed-certs/serial/SecondStart 439.33
293 TestStartStop/group/default-k8s-different-port/serial/FirstStart 112.48
294 TestStartStop/group/no-preload/serial/EnableAddonAfterStop 0.16
295 TestStartStop/group/no-preload/serial/SecondStart 356.73
296 TestStartStop/group/default-k8s-different-port/serial/DeployApp 8.65
297 TestStartStop/group/default-k8s-different-port/serial/EnableAddonWhileActive 1.07
298 TestStartStop/group/default-k8s-different-port/serial/Stop 92.52
299 TestStartStop/group/default-k8s-different-port/serial/EnableAddonAfterStop 0.16
300 TestStartStop/group/default-k8s-different-port/serial/SecondStart 452.72
301 TestStartStop/group/no-preload/serial/UserAppExistsAfterStop 14.03
302 TestStartStop/group/no-preload/serial/AddonExistsAfterStop 5.11
303 TestStartStop/group/no-preload/serial/VerifyKubernetesImages 0.25
304 TestStartStop/group/no-preload/serial/Pause 2.75
306 TestStartStop/group/newest-cni/serial/FirstStart 86.32
307 TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop 6.03
308 TestStartStop/group/embed-certs/serial/AddonExistsAfterStop 5.1
309 TestStartStop/group/embed-certs/serial/VerifyKubernetesImages 0.24
310 TestStartStop/group/embed-certs/serial/Pause 2.61
311 TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop 5.02
312 TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop 5.26
313 TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages 0.31
314 TestStartStop/group/old-k8s-version/serial/Pause 2.92
315 TestStartStop/group/newest-cni/serial/DeployApp 0
316 TestStartStop/group/newest-cni/serial/EnableAddonWhileActive 1.04
317 TestStartStop/group/newest-cni/serial/Stop 5.11
318 TestStartStop/group/newest-cni/serial/EnableAddonAfterStop 0.15
319 TestStartStop/group/newest-cni/serial/SecondStart 114.31
320 TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop 0
321 TestStartStop/group/newest-cni/serial/AddonExistsAfterStop 0
322 TestStartStop/group/newest-cni/serial/VerifyKubernetesImages 0.25
323 TestStartStop/group/newest-cni/serial/Pause 2.18
324 TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop 5.04
325 TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop 5.1
326 TestStartStop/group/default-k8s-different-port/serial/VerifyKubernetesImages 0.26
327 TestStartStop/group/default-k8s-different-port/serial/Pause 2.49
TestDownloadOnly/v1.14.0/json-events (9.22s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.14.0/json-events
aaa_download_only_test.go:69: (dbg) Run:  out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.14.0 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd
aaa_download_only_test.go:69: (dbg) Done: out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.14.0 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd: (9.215041445s)
--- PASS: TestDownloadOnly/v1.14.0/json-events (9.22s)
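The json-events tests above drive minikube start with -o=json, which makes minikube print one JSON event object per stdout line instead of the styled text output. A hypothetical consumer of that stream could look like the sketch below; the profile name is made up, and the only assumption is that each stdout line is a self-contained JSON object carrying a "type" field.

	package main
	
	import (
		"bufio"
		"encoding/json"
		"fmt"
		"os/exec"
	)
	
	func main() {
		// Hypothetical profile name; the flags mirror the test invocation above.
		cmd := exec.Command("minikube", "start", "-o=json", "--download-only",
			"-p", "download-only-demo", "--kubernetes-version=v1.14.0",
			"--container-runtime=containerd", "--driver=kvm2")
		out, err := cmd.StdoutPipe()
		if err != nil {
			panic(err)
		}
		if err := cmd.Start(); err != nil {
			panic(err)
		}
		sc := bufio.NewScanner(out)
		for sc.Scan() {
			var ev map[string]interface{}
			if json.Unmarshal(sc.Bytes(), &ev) == nil {
				fmt.Println("event type:", ev["type"]) // e.g. a step or download event
			}
		}
		_ = cmd.Wait() // a non-zero exit from minikube surfaces here
	}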

                                                
                                    
TestDownloadOnly/v1.14.0/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.14.0/preload-exists
--- PASS: TestDownloadOnly/v1.14.0/preload-exists (0.00s)

                                                
                                    
TestDownloadOnly/v1.14.0/LogsDuration (0.07s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.14.0/LogsDuration
aaa_download_only_test.go:171: (dbg) Run:  out/minikube-linux-amd64 logs -p download-only-20210812234017-679351
aaa_download_only_test.go:171: (dbg) Non-zero exit: out/minikube-linux-amd64 logs -p download-only-20210812234017-679351: exit status 85 (66.160502ms)

                                                
                                                
-- stdout --
	* 
	* ==> Audit <==
	* |---------|------|---------|------|---------|------------|----------|
	| Command | Args | Profile | User | Version | Start Time | End Time |
	|---------|------|---------|------|---------|------------|----------|
	|---------|------|---------|------|---------|------------|----------|
	
	* 
	* ==> Last Start <==
	* Log file created at: 2021/08/12 23:40:17
	Running on machine: debian-jenkins-agent-10
	Binary: Built with gc go1.16.7 for linux/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0812 23:40:17.466958  679363 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:40:17.467031  679363 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:17.467035  679363 out.go:311] Setting ErrFile to fd 2...
	I0812 23:40:17.467038  679363 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:17.467688  679363 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	W0812 23:40:17.467893  679363 root.go:291] Error reading config file at /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: no such file or directory
	I0812 23:40:17.468368  679363 out.go:305] Setting JSON to true
	I0812 23:40:17.503633  679363 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":12181,"bootTime":1628799437,"procs":215,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0812 23:40:17.503750  679363 start.go:121] virtualization: kvm guest
	I0812 23:40:17.506916  679363 notify.go:169] Checking for updates...
	I0812 23:40:17.508834  679363 driver.go:335] Setting default libvirt URI to qemu:///system
	I0812 23:40:17.537775  679363 start.go:278] selected driver: kvm2
	I0812 23:40:17.537794  679363 start.go:751] validating driver "kvm2" against <nil>
	I0812 23:40:17.538487  679363 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:17.538653  679363 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0812 23:40:17.549603  679363 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0812 23:40:17.549658  679363 start_flags.go:263] no existing cluster config was found, will generate one from the flags 
	I0812 23:40:17.550151  679363 start_flags.go:344] Using suggested 6000MB memory alloc based on sys=32179MB, container=0MB
	I0812 23:40:17.550287  679363 start_flags.go:679] Wait components to verify : map[apiserver:true system_pods:true]
	I0812 23:40:17.550312  679363 cni.go:93] Creating CNI manager for ""
	I0812 23:40:17.550320  679363 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0812 23:40:17.550333  679363 start_flags.go:272] Found "bridge CNI" CNI - setting NetworkPlugin=cni
	I0812 23:40:17.550341  679363 start_flags.go:277] config:
	{Name:download-only-20210812234017-679351 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:6000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.14.0 ClusterName:download-only-20210812234017-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:40:17.550534  679363 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:17.552520  679363 download.go:92] Downloading: https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso?checksum=file:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso.sha256 -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/iso/minikube-v1.22.0-1628238775-12122.iso
	I0812 23:40:19.654836  679363 preload.go:131] Checking if preload exists for k8s version v1.14.0 and runtime containerd
	I0812 23:40:19.703076  679363 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4
	I0812 23:40:19.703132  679363 cache.go:56] Caching tarball of preloaded images
	I0812 23:40:19.703336  679363 preload.go:131] Checking if preload exists for k8s version v1.14.0 and runtime containerd
	I0812 23:40:19.705320  679363 preload.go:237] getting checksum for preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4 ...
	I0812 23:40:19.766919  679363 download.go:92] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4?checksum=md5:8891d3d5a9795ff90493434142d1724b -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4
	
	* 
	* The control plane node "" does not exist.
	  To start a cluster, run: "minikube start -p download-only-20210812234017-679351"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:172: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.14.0/LogsDuration (0.07s)
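A note on the download.go lines in the log above: the ?checksum=md5:... query string appended to the preload URL is the hashicorp/go-getter convention for checksum-verified fetches, so the download fails with an error if the tarball's md5 does not match. A minimal standalone sketch of the same kind of fetch follows; the destination path is arbitrary, and the URL and checksum are copied from the log.

	package main
	
	import (
		"fmt"
	
		getter "github.com/hashicorp/go-getter"
	)
	
	func main() {
		// URL and md5 checksum copied from the test log; go-getter verifies the
		// checksum after downloading and returns an error on a mismatch.
		src := "https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4?checksum=md5:8891d3d5a9795ff90493434142d1724b"
		dst := "/tmp/preloaded-images-k8s-v11-v1.14.0-containerd-overlay2-amd64.tar.lz4" // arbitrary destination
	
		client := &getter.Client{
			Src:  src,
			Dst:  dst,
			Mode: getter.ClientModeFile, // fetch a single file
		}
		if err := client.Get(); err != nil {
			fmt.Println("download failed:", err)
			return
		}
		fmt.Println("downloaded and verified:", dst)
	}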

                                                
                                    
TestDownloadOnly/v1.21.3/json-events (7.71s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.21.3/json-events
aaa_download_only_test.go:69: (dbg) Run:  out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.21.3 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd
aaa_download_only_test.go:69: (dbg) Done: out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.21.3 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd: (7.713068558s)
--- PASS: TestDownloadOnly/v1.21.3/json-events (7.71s)

                                                
                                    
TestDownloadOnly/v1.21.3/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.21.3/preload-exists
--- PASS: TestDownloadOnly/v1.21.3/preload-exists (0.00s)

                                                
                                    
TestDownloadOnly/v1.21.3/LogsDuration (0.07s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.21.3/LogsDuration
aaa_download_only_test.go:171: (dbg) Run:  out/minikube-linux-amd64 logs -p download-only-20210812234017-679351
aaa_download_only_test.go:171: (dbg) Non-zero exit: out/minikube-linux-amd64 logs -p download-only-20210812234017-679351: exit status 85 (66.726054ms)

                                                
                                                
-- stdout --
	* 
	* ==> Audit <==
	* |---------|------|---------|------|---------|------------|----------|
	| Command | Args | Profile | User | Version | Start Time | End Time |
	|---------|------|---------|------|---------|------------|----------|
	|---------|------|---------|------|---------|------------|----------|
	
	* 
	* ==> Last Start <==
	* Log file created at: 2021/08/12 23:40:26
	Running on machine: debian-jenkins-agent-10
	Binary: Built with gc go1.16.7 for linux/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0812 23:40:26.748678  679399 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:40:26.748750  679399 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:26.748756  679399 out.go:311] Setting ErrFile to fd 2...
	I0812 23:40:26.748761  679399 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:26.748862  679399 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	W0812 23:40:26.749004  679399 root.go:291] Error reading config file at /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: no such file or directory
	I0812 23:40:26.749155  679399 out.go:305] Setting JSON to true
	I0812 23:40:26.784655  679399 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":12190,"bootTime":1628799437,"procs":215,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0812 23:40:26.784742  679399 start.go:121] virtualization: kvm guest
	I0812 23:40:26.787233  679399 notify.go:169] Checking for updates...
	W0812 23:40:26.789370  679399 start.go:659] api.Load failed for download-only-20210812234017-679351: filestore "download-only-20210812234017-679351": Docker machine "download-only-20210812234017-679351" does not exist. Use "docker-machine ls" to list machines. Use "docker-machine create" to add a new one.
	I0812 23:40:26.789419  679399 driver.go:335] Setting default libvirt URI to qemu:///system
	W0812 23:40:26.789457  679399 start.go:659] api.Load failed for download-only-20210812234017-679351: filestore "download-only-20210812234017-679351": Docker machine "download-only-20210812234017-679351" does not exist. Use "docker-machine ls" to list machines. Use "docker-machine create" to add a new one.
	I0812 23:40:26.817915  679399 start.go:278] selected driver: kvm2
	I0812 23:40:26.817936  679399 start.go:751] validating driver "kvm2" against &{Name:download-only-20210812234017-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:6000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.14.0 ClusterName:download-only-20210812234017-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.14.0 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:40:26.818744  679399 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:26.818914  679399 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0812 23:40:26.829812  679399 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0812 23:40:26.830513  679399 cni.go:93] Creating CNI manager for ""
	I0812 23:40:26.830528  679399 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0812 23:40:26.830536  679399 start_flags.go:277] config:
	{Name:download-only-20210812234017-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:6000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:download-only-20210812234017-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.14.0 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:40:26.830646  679399 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:26.832855  679399 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0812 23:40:26.878582  679399 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4
	I0812 23:40:26.878610  679399 cache.go:56] Caching tarball of preloaded images
	I0812 23:40:26.878760  679399 preload.go:131] Checking if preload exists for k8s version v1.21.3 and runtime containerd
	I0812 23:40:26.880828  679399 preload.go:237] getting checksum for preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4 ...
	I0812 23:40:26.939965  679399 download.go:92] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4?checksum=md5:6ee74ddc722ac9485c71891d6e62193d -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.21.3-containerd-overlay2-amd64.tar.lz4
	
	* 
	* The control plane node "" does not exist.
	  To start a cluster, run: "minikube start -p download-only-20210812234017-679351"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:172: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.21.3/LogsDuration (0.07s)

                                                
                                    
TestDownloadOnly/v1.22.0-rc.0/json-events (8.69s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.22.0-rc.0/json-events
aaa_download_only_test.go:69: (dbg) Run:  out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.22.0-rc.0 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd
aaa_download_only_test.go:69: (dbg) Done: out/minikube-linux-amd64 start -o=json --download-only -p download-only-20210812234017-679351 --force --alsologtostderr --kubernetes-version=v1.22.0-rc.0 --container-runtime=containerd --driver=kvm2  --container-runtime=containerd: (8.686805866s)
--- PASS: TestDownloadOnly/v1.22.0-rc.0/json-events (8.69s)

                                                
                                    
TestDownloadOnly/v1.22.0-rc.0/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.22.0-rc.0/preload-exists
--- PASS: TestDownloadOnly/v1.22.0-rc.0/preload-exists (0.00s)

                                                
                                    
TestDownloadOnly/v1.22.0-rc.0/LogsDuration (0.07s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.22.0-rc.0/LogsDuration
aaa_download_only_test.go:171: (dbg) Run:  out/minikube-linux-amd64 logs -p download-only-20210812234017-679351
aaa_download_only_test.go:171: (dbg) Non-zero exit: out/minikube-linux-amd64 logs -p download-only-20210812234017-679351: exit status 85 (66.226403ms)

                                                
                                                
-- stdout --
	* 
	* ==> Audit <==
	* |---------|------|---------|------|---------|------------|----------|
	| Command | Args | Profile | User | Version | Start Time | End Time |
	|---------|------|---------|------|---------|------------|----------|
	|---------|------|---------|------|---------|------------|----------|
	
	* 
	* ==> Last Start <==
	* Log file created at: 2021/08/12 23:40:34
	Running on machine: debian-jenkins-agent-10
	Binary: Built with gc go1.16.7 for linux/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0812 23:40:34.531361  679434 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:40:34.531972  679434 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:34.531988  679434 out.go:311] Setting ErrFile to fd 2...
	I0812 23:40:34.531994  679434 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:40:34.532270  679434 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	W0812 23:40:34.532734  679434 root.go:291] Error reading config file at /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/config/config.json: no such file or directory
	I0812 23:40:34.532913  679434 out.go:305] Setting JSON to true
	I0812 23:40:34.569664  679434 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":12198,"bootTime":1628799437,"procs":215,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0812 23:40:34.569771  679434 start.go:121] virtualization: kvm guest
	I0812 23:40:34.572451  679434 notify.go:169] Checking for updates...
	W0812 23:40:34.574636  679434 start.go:659] api.Load failed for download-only-20210812234017-679351: filestore "download-only-20210812234017-679351": Docker machine "download-only-20210812234017-679351" does not exist. Use "docker-machine ls" to list machines. Use "docker-machine create" to add a new one.
	I0812 23:40:34.574698  679434 driver.go:335] Setting default libvirt URI to qemu:///system
	W0812 23:40:34.574742  679434 start.go:659] api.Load failed for download-only-20210812234017-679351: filestore "download-only-20210812234017-679351": Docker machine "download-only-20210812234017-679351" does not exist. Use "docker-machine ls" to list machines. Use "docker-machine create" to add a new one.
	I0812 23:40:34.603441  679434 start.go:278] selected driver: kvm2
	I0812 23:40:34.603457  679434 start.go:751] validating driver "kvm2" against &{Name:download-only-20210812234017-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:6000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:download-only-20210812234017-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:40:34.604290  679434 install.go:52] acquiring lock: {Name:mk900956b073697a4aa6c80a27c6bb0742a99a53 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:34.604459  679434 install.go:117] Validating docker-machine-driver-kvm2, PATH=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin:/home/jenkins/workspace/KVM_Linux_containerd_integration/out/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/go/bin:/home/jenkins/go/bin:/usr/local/bin/:/usr/local/go/bin/:/home/jenkins/go/bin
	I0812 23:40:34.616047  679434 install.go:137] /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2 version is 1.22.0
	I0812 23:40:34.616738  679434 cni.go:93] Creating CNI manager for ""
	I0812 23:40:34.616754  679434 cni.go:163] "kvm2" driver + containerd runtime found, recommending bridge
	I0812 23:40:34.616764  679434 start_flags.go:277] config:
	{Name:download-only-20210812234017-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:6000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.22.0-rc.0 ClusterName:download-only-20210812234017-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:40:34.616923  679434 iso.go:123] acquiring lock: {Name:mke80f4e00d5590a17349e0875191e5cd211cb9b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0812 23:40:34.618696  679434 preload.go:131] Checking if preload exists for k8s version v1.22.0-rc.0 and runtime containerd
	I0812 23:40:34.667856  679434 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.22.0-rc.0-containerd-overlay2-amd64.tar.lz4
	I0812 23:40:34.667908  679434 cache.go:56] Caching tarball of preloaded images
	I0812 23:40:34.668434  679434 preload.go:131] Checking if preload exists for k8s version v1.22.0-rc.0 and runtime containerd
	I0812 23:40:34.670661  679434 preload.go:237] getting checksum for preloaded-images-k8s-v11-v1.22.0-rc.0-containerd-overlay2-amd64.tar.lz4 ...
	I0812 23:40:34.736042  679434 download.go:92] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/preloaded-images-k8s-v11-v1.22.0-rc.0-containerd-overlay2-amd64.tar.lz4?checksum=md5:569167d620e883cc7aa194927ed83d26 -> /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v11-v1.22.0-rc.0-containerd-overlay2-amd64.tar.lz4
	
	* 
	* The control plane node "" does not exist.
	  To start a cluster, run: "minikube start -p download-only-20210812234017-679351"

-- /stdout --
aaa_download_only_test.go:172: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.22.0-rc.0/LogsDuration (0.07s)
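
Note: the preload step above fetches the tarball and validates it against the md5 checksum carried in the URL query string. A minimal Go sketch of that download-and-verify pattern (the URL, destination path, and helper name are illustrative, not minikube's actual implementation):

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"net/http"
	"os"
)

// downloadWithMD5 streams the response to disk while hashing it in one pass,
// then compares the digest against the expected checksum.
func downloadWithMD5(url, dest, wantMD5 string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	out, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer out.Close()
	h := md5.New()
	if _, err := io.Copy(io.MultiWriter(out, h), resp.Body); err != nil {
		return err
	}
	if got := hex.EncodeToString(h.Sum(nil)); got != wantMD5 {
		return fmt.Errorf("checksum mismatch: got %s, want %s", got, wantMD5)
	}
	return nil
}

func main() {
	// Placeholder arguments; the checksum value is the one shown in the log.
	if err := downloadWithMD5("https://example.com/preload.tar.lz4",
		"/tmp/preload.tar.lz4", "569167d620e883cc7aa194927ed83d26"); err != nil {
		panic(err)
	}
}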

TestDownloadOnly/DeleteAll (0.24s)

=== RUN   TestDownloadOnly/DeleteAll
aaa_download_only_test.go:189: (dbg) Run:  out/minikube-linux-amd64 delete --all
--- PASS: TestDownloadOnly/DeleteAll (0.24s)

TestDownloadOnly/DeleteAlwaysSucceeds (0.23s)

=== RUN   TestDownloadOnly/DeleteAlwaysSucceeds
aaa_download_only_test.go:201: (dbg) Run:  out/minikube-linux-amd64 delete -p download-only-20210812234017-679351
--- PASS: TestDownloadOnly/DeleteAlwaysSucceeds (0.23s)

TestOffline (140.3s)

=== RUN   TestOffline
=== PAUSE TestOffline

=== CONT  TestOffline
aab_offline_test.go:55: (dbg) Run:  out/minikube-linux-amd64 start -p offline-containerd-20210813001951-679351 --alsologtostderr -v=1 --memory=2048 --wait=true --driver=kvm2  --container-runtime=containerd

=== CONT  TestOffline
aab_offline_test.go:55: (dbg) Done: out/minikube-linux-amd64 start -p offline-containerd-20210813001951-679351 --alsologtostderr -v=1 --memory=2048 --wait=true --driver=kvm2  --container-runtime=containerd: (2m19.414568699s)
helpers_test.go:176: Cleaning up "offline-containerd-20210813001951-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p offline-containerd-20210813001951-679351
--- PASS: TestOffline (140.30s)

TestAddons/parallel/Registry (21.86s)

=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

=== CONT  TestAddons/parallel/Registry
addons_test.go:284: registry stabilized in 21.244864ms

=== CONT  TestAddons/parallel/Registry
addons_test.go:286: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...

=== CONT  TestAddons/parallel/Registry
helpers_test.go:343: "registry-h8bdz" [741146f0-8246-4f42-b83a-df85e603ade4] Running

=== CONT  TestAddons/parallel/Registry
addons_test.go:286: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 5.026117901s
addons_test.go:289: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...

=== CONT  TestAddons/parallel/Registry
helpers_test.go:343: "registry-proxy-sxcvg" [b3a29e2c-f3c3-436c-af96-9927865ee827] Running

=== CONT  TestAddons/parallel/Registry
addons_test.go:289: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 5.027296339s
addons_test.go:294: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete po -l run=registry-test --now
addons_test.go:299: (dbg) Run:  kubectl --context addons-20210812234043-679351 run --rm registry-test --restart=Never --image=busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"

=== CONT  TestAddons/parallel/Registry
addons_test.go:299: (dbg) Done: kubectl --context addons-20210812234043-679351 run --rm registry-test --restart=Never --image=busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": (11.037697162s)
addons_test.go:313: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ip
2021/08/12 23:44:10 [DEBUG] GET http://192.168.50.160:5000
addons_test.go:342: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable registry --alsologtostderr -v=1
--- PASS: TestAddons/parallel/Registry (21.86s)
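
Note: the "waiting 6m0s for pods matching ..." lines come from a helper that polls the API server until every pod carrying the label is Running. An illustrative client-go sketch of such a loop, assuming a kubeconfig in the default location (the function name is hypothetical; selector and namespace mirror the log):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitForRunningPods polls until at least one pod matches the selector and
// every match reports phase Running.
func waitForRunningPods(cs kubernetes.Interface, ns, selector string, timeout time.Duration) error {
	return wait.PollImmediate(2*time.Second, timeout, func() (bool, error) {
		pods, err := cs.CoreV1().Pods(ns).List(context.TODO(), metav1.ListOptions{LabelSelector: selector})
		if err != nil || len(pods.Items) == 0 {
			return false, nil // not there yet (or transient API error): keep polling
		}
		for _, p := range pods.Items {
			if p.Status.Phase != corev1.PodRunning {
				return false, nil
			}
		}
		return true, nil
	})
}

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	if err := waitForRunningPods(cs, "kube-system", "registry-proxy=true", 10*time.Minute); err != nil {
		panic(err)
	}
	fmt.Println("registry proxy is up")
}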

TestAddons/parallel/Ingress (47.21s)

=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

=== CONT  TestAddons/parallel/Ingress
addons_test.go:158: (dbg) TestAddons/parallel/Ingress: waiting 12m0s for pods matching "app.kubernetes.io/name=ingress-nginx" in namespace "ingress-nginx" ...
helpers_test.go:343: "ingress-nginx-admission-create-brvbh" [036f9166-d59f-4cc0-a901-450fdef9491a] Succeeded: Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
addons_test.go:158: (dbg) TestAddons/parallel/Ingress: app.kubernetes.io/name=ingress-nginx healthy within 9.656454ms
addons_test.go:165: (dbg) Run:  kubectl --context addons-20210812234043-679351 replace --force -f testdata/nginx-ingv1.yaml
addons_test.go:180: (dbg) Run:  kubectl --context addons-20210812234043-679351 replace --force -f testdata/nginx-pod-svc.yaml
addons_test.go:185: (dbg) TestAddons/parallel/Ingress: waiting 4m0s for pods matching "run=nginx" in namespace "default" ...
helpers_test.go:343: "nginx" [3bea138d-e5d3-48f5-a74d-66cf0f996cc7] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])

=== CONT  TestAddons/parallel/Ingress
helpers_test.go:343: "nginx" [3bea138d-e5d3-48f5-a74d-66cf0f996cc7] Running
addons_test.go:185: (dbg) TestAddons/parallel/Ingress: run=nginx healthy within 12.012522326s
addons_test.go:204: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:165: (dbg) Run:  kubectl --context addons-20210812234043-679351 replace --force -f testdata/nginx-ingv1.yaml
addons_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"

=== CONT  TestAddons/parallel/Ingress
addons_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"

=== CONT  TestAddons/parallel/Ingress
addons_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"

=== CONT  TestAddons/parallel/Ingress
addons_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:265: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable ingress --alsologtostderr -v=1

=== CONT  TestAddons/parallel/Ingress
addons_test.go:265: (dbg) Done: out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable ingress --alsologtostderr -v=1: (30.027992854s)
--- PASS: TestAddons/parallel/Ingress (47.21s)
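
Note: the curl probes above request the node over plain HTTP with a spoofed Host header so the nginx ingress rule matches. The same check in Go; the target address is a placeholder for the cluster's node IP:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, err := http.NewRequest("GET", "http://192.168.50.160/", nil) // placeholder node IP
	if err != nil {
		panic(err)
	}
	// net/http sends req.Host, not the Header map, as the Host: header.
	req.Host = "nginx.example.com"
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, len(body), "bytes")
}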

TestAddons/parallel/MetricsServer (5.73s)

=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:361: metrics-server stabilized in 21.566575ms

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:363: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...

=== CONT  TestAddons/parallel/MetricsServer
helpers_test.go:343: "metrics-server-77c99ccb96-zd7j7" [d5eca170-fd6e-4864-b460-b453c2d637a1] Running

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:363: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 5.029453452s

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:369: (dbg) Run:  kubectl --context addons-20210812234043-679351 top pods -n kube-system

=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:386: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable metrics-server --alsologtostderr -v=1
--- PASS: TestAddons/parallel/MetricsServer (5.73s)

TestAddons/parallel/HelmTiller (27.1s)

=== RUN   TestAddons/parallel/HelmTiller
=== PAUSE TestAddons/parallel/HelmTiller

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:410: tiller-deploy stabilized in 21.077014ms

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:412: (dbg) TestAddons/parallel/HelmTiller: waiting 6m0s for pods matching "app=helm" in namespace "kube-system" ...

=== CONT  TestAddons/parallel/HelmTiller
helpers_test.go:343: "tiller-deploy-768d69497-sgdkb" [54a51813-46db-4a66-9a59-5a7c91d2eb00] Running

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:412: (dbg) TestAddons/parallel/HelmTiller: app=helm healthy within 5.029474842s

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:427: (dbg) Run:  kubectl --context addons-20210812234043-679351 run --rm helm-test --restart=Never --image=alpine/helm:2.16.3 -it --namespace=kube-system --serviceaccount=tiller -- version

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:427: (dbg) Done: kubectl --context addons-20210812234043-679351 run --rm helm-test --restart=Never --image=alpine/helm:2.16.3 -it --namespace=kube-system --serviceaccount=tiller -- version: (6.041774357s)
addons_test.go:432: kubectl --context addons-20210812234043-679351 run --rm helm-test --restart=Never --image=alpine/helm:2.16.3 -it --namespace=kube-system --serviceaccount=tiller -- version: unexpected stderr: Unable to use a TTY - input is not a terminal or the right kind of file
If you don't see a command prompt, try pressing enter.
Error attaching, falling back to logs: 
addons_test.go:427: (dbg) Run:  kubectl --context addons-20210812234043-679351 run --rm helm-test --restart=Never --image=alpine/helm:2.16.3 -it --namespace=kube-system --serviceaccount=tiller -- version

=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:427: (dbg) Done: kubectl --context addons-20210812234043-679351 run --rm helm-test --restart=Never --image=alpine/helm:2.16.3 -it --namespace=kube-system --serviceaccount=tiller -- version: (14.749538131s)
addons_test.go:444: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable helm-tiller --alsologtostderr -v=1
--- PASS: TestAddons/parallel/HelmTiller (27.10s)
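
Note: the "Unable to use a TTY" stderr is expected: the test passes -t while its stdin is a pipe, so kubectl falls back to logs and the helper simply retries. A sketch of the same probe with -i only (no TTY requested), which avoids the warning; the flags are copied from the log:

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	cmd := exec.Command("kubectl", "--context", "addons-20210812234043-679351",
		"run", "--rm", "helm-test", "--restart=Never", "--image=alpine/helm:2.16.3",
		"-i", "--namespace=kube-system", "--serviceaccount=tiller", "--", "version")
	out, err := cmd.CombinedOutput()
	fmt.Print(string(out))
	if err != nil {
		fmt.Println("probe failed:", err)
	}
}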

TestAddons/parallel/Olm (51.8s)

=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm

=== CONT  TestAddons/parallel/Olm
addons_test.go:463: catalog-operator stabilized in 2.860418ms
addons_test.go:467: olm-operator stabilized in 17.269608ms
addons_test.go:471: packageserver stabilized in 19.502862ms
addons_test.go:473: (dbg) TestAddons/parallel/Olm: waiting 6m0s for pods matching "app=catalog-operator" in namespace "olm" ...
helpers_test.go:343: "catalog-operator-75d496484d-42hpw" [00df968e-6d9e-467a-ad63-65f50325e486] Running

=== CONT  TestAddons/parallel/Olm
addons_test.go:473: (dbg) TestAddons/parallel/Olm: app=catalog-operator healthy within 5.009282129s
addons_test.go:476: (dbg) TestAddons/parallel/Olm: waiting 6m0s for pods matching "app=olm-operator" in namespace "olm" ...
helpers_test.go:343: "olm-operator-859c88c96-hpb85" [c0307293-fb38-410d-a464-df73477450ba] Running

=== CONT  TestAddons/parallel/Olm
addons_test.go:476: (dbg) TestAddons/parallel/Olm: app=olm-operator healthy within 5.015249389s
addons_test.go:479: (dbg) TestAddons/parallel/Olm: waiting 6m0s for pods matching "app=packageserver" in namespace "olm" ...
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
helpers_test.go:343: "packageserver-b5bc58955-jss6q" [8bd3afd0-1434-4d1c-86cc-dbcd39255f95] Running
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
helpers_test.go:343: "packageserver-b5bc58955-jss6q" [8bd3afd0-1434-4d1c-86cc-dbcd39255f95] Running
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
helpers_test.go:343: "packageserver-b5bc58955-jss6q" [8bd3afd0-1434-4d1c-86cc-dbcd39255f95] Running
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
helpers_test.go:343: "packageserver-b5bc58955-jss6q" [8bd3afd0-1434-4d1c-86cc-dbcd39255f95] Running
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
helpers_test.go:343: "packageserver-b5bc58955-jss6q" [8bd3afd0-1434-4d1c-86cc-dbcd39255f95] Running
helpers_test.go:343: "packageserver-b5bc58955-gzhxc" [efd921a1-8b22-499c-a191-f2b12b95db41] Running
addons_test.go:479: (dbg) TestAddons/parallel/Olm: app=packageserver healthy within 5.01640654s
addons_test.go:482: (dbg) TestAddons/parallel/Olm: waiting 6m0s for pods matching "olm.catalogSource=operatorhubio-catalog" in namespace "olm" ...
helpers_test.go:343: "operatorhubio-catalog-5cvnh" [42ec6cfe-b1f3-4637-91f3-9d488e9e75f9] Running

=== CONT  TestAddons/parallel/Olm
addons_test.go:482: (dbg) TestAddons/parallel/Olm: olm.catalogSource=operatorhubio-catalog healthy within 5.013735888s
addons_test.go:487: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/etcd.yaml
addons_test.go:494: (dbg) Run:  kubectl --context addons-20210812234043-679351 get csv -n my-etcd
addons_test.go:499: kubectl --context addons-20210812234043-679351 get csv -n my-etcd: unexpected stderr: No resources found in my-etcd namespace.
addons_test.go:494: (dbg) Run:  kubectl --context addons-20210812234043-679351 get csv -n my-etcd
addons_test.go:499: kubectl --context addons-20210812234043-679351 get csv -n my-etcd: unexpected stderr: No resources found in my-etcd namespace.

=== CONT  TestAddons/parallel/Olm
addons_test.go:494: (dbg) Run:  kubectl --context addons-20210812234043-679351 get csv -n my-etcd
addons_test.go:499: kubectl --context addons-20210812234043-679351 get csv -n my-etcd: unexpected stderr: No resources found in my-etcd namespace.

=== CONT  TestAddons/parallel/Olm
addons_test.go:494: (dbg) Run:  kubectl --context addons-20210812234043-679351 get csv -n my-etcd
addons_test.go:494: (dbg) Run:  kubectl --context addons-20210812234043-679351 get csv -n my-etcd
--- PASS: TestAddons/parallel/Olm (51.80s)
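
Note: the repeated `get csv` runs show the test polling until OLM finishes installing the operator's ClusterServiceVersion. A sketch of that retry loop (the two-minute deadline is arbitrary):

package main

import (
	"fmt"
	"os/exec"
	"strings"
	"time"
)

func main() {
	deadline := time.Now().Add(2 * time.Minute)
	for time.Now().Before(deadline) {
		out, err := exec.Command("kubectl", "--context", "addons-20210812234043-679351",
			"get", "csv", "-n", "my-etcd").CombinedOutput()
		if err == nil && !strings.Contains(string(out), "No resources found") {
			fmt.Print(string(out)) // the CSV exists; done
			return
		}
		time.Sleep(5 * time.Second)
	}
	fmt.Println("timed out waiting for a CSV in my-etcd")
}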

TestAddons/parallel/CSI (87.28s)

=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI

=== CONT  TestAddons/parallel/CSI
addons_test.go:526: csi-hostpath-driver pods stabilized in 10.306446ms
addons_test.go:529: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/csi-hostpath-driver/pvc.yaml
addons_test.go:534: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
helpers_test.go:393: (dbg) Run:  kubectl --context addons-20210812234043-679351 get pvc hpvc -o jsonpath={.status.phase} -n default

=== CONT  TestAddons/parallel/CSI
helpers_test.go:393: (dbg) Run:  kubectl --context addons-20210812234043-679351 get pvc hpvc -o jsonpath={.status.phase} -n default
addons_test.go:539: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/csi-hostpath-driver/pv-pod.yaml
addons_test.go:544: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod" in namespace "default" ...
helpers_test.go:343: "task-pv-pod" [a538a5f5-b59b-4f3e-b07a-6e43c094b49d] Pending
helpers_test.go:343: "task-pv-pod" [a538a5f5-b59b-4f3e-b07a-6e43c094b49d] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])

=== CONT  TestAddons/parallel/CSI
helpers_test.go:343: "task-pv-pod" [a538a5f5-b59b-4f3e-b07a-6e43c094b49d] Running

=== CONT  TestAddons/parallel/CSI
addons_test.go:544: (dbg) TestAddons/parallel/CSI: app=task-pv-pod healthy within 29.013136041s
addons_test.go:549: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/csi-hostpath-driver/snapshot.yaml
addons_test.go:554: (dbg) TestAddons/parallel/CSI: waiting 6m0s for volume snapshot "new-snapshot-demo" in namespace "default" ...
helpers_test.go:418: (dbg) Run:  kubectl --context addons-20210812234043-679351 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
helpers_test.go:426: TestAddons/parallel/CSI: WARNING: volume snapshot get for "default" "new-snapshot-demo" returned: 

=== CONT  TestAddons/parallel/CSI
helpers_test.go:418: (dbg) Run:  kubectl --context addons-20210812234043-679351 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
addons_test.go:559: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete pod task-pv-pod

=== CONT  TestAddons/parallel/CSI
addons_test.go:559: (dbg) Done: kubectl --context addons-20210812234043-679351 delete pod task-pv-pod: (7.32305187s)
addons_test.go:565: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete pvc hpvc
addons_test.go:571: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/csi-hostpath-driver/pvc-restore.yaml
addons_test.go:576: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc-restore" in namespace "default" ...
helpers_test.go:393: (dbg) Run:  kubectl --context addons-20210812234043-679351 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:393: (dbg) Run:  kubectl --context addons-20210812234043-679351 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
addons_test.go:581: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/csi-hostpath-driver/pv-pod-restore.yaml
addons_test.go:586: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod-restore" in namespace "default" ...
helpers_test.go:343: "task-pv-pod-restore" [10367ce7-a20e-4a1b-901b-49fdeb5d63c0] Pending
helpers_test.go:343: "task-pv-pod-restore" [10367ce7-a20e-4a1b-901b-49fdeb5d63c0] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])

=== CONT  TestAddons/parallel/CSI
helpers_test.go:343: "task-pv-pod-restore" [10367ce7-a20e-4a1b-901b-49fdeb5d63c0] Running
addons_test.go:586: (dbg) TestAddons/parallel/CSI: app=task-pv-pod-restore healthy within 24.042732825s
addons_test.go:591: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete pod task-pv-pod-restore
addons_test.go:591: (dbg) Done: kubectl --context addons-20210812234043-679351 delete pod task-pv-pod-restore: (13.347850113s)
addons_test.go:595: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete pvc hpvc-restore
addons_test.go:599: (dbg) Run:  kubectl --context addons-20210812234043-679351 delete volumesnapshot new-snapshot-demo
addons_test.go:603: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable csi-hostpath-driver --alsologtostderr -v=1
addons_test.go:603: (dbg) Done: out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable csi-hostpath-driver --alsologtostderr -v=1: (7.151580423s)
addons_test.go:607: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable volumesnapshots --alsologtostderr -v=1
--- PASS: TestAddons/parallel/CSI (87.28s)
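
Note: the jsonpath queries above poll the claim's phase until the CSI driver binds it. The same loop in Go (hypothetical helper name; the kubectl invocation mirrors the log):

package main

import (
	"fmt"
	"os/exec"
	"strings"
	"time"
)

func waitPVCBound(kubeContext, name, ns string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		out, err := exec.Command("kubectl", "--context", kubeContext, "get", "pvc", name,
			"-o", "jsonpath={.status.phase}", "-n", ns).Output()
		if err == nil && strings.TrimSpace(string(out)) == "Bound" {
			return nil
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("pvc %s/%s not Bound after %v", ns, name, timeout)
}

func main() {
	if err := waitPVCBound("addons-20210812234043-679351", "hpvc", "default", 6*time.Minute); err != nil {
		panic(err)
	}
	fmt.Println("hpvc is Bound")
}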

TestAddons/parallel/GCPAuth (46s)

=== RUN   TestAddons/parallel/GCPAuth
=== PAUSE TestAddons/parallel/GCPAuth

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:618: (dbg) Run:  kubectl --context addons-20210812234043-679351 create -f testdata/busybox.yaml

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:624: (dbg) TestAddons/parallel/GCPAuth: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:343: "busybox" [6d320f04-3d0a-4efd-b735-a600585ea907] Pending
helpers_test.go:343: "busybox" [6d320f04-3d0a-4efd-b735-a600585ea907] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:343: "busybox" [6d320f04-3d0a-4efd-b735-a600585ea907] Running

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:624: (dbg) TestAddons/parallel/GCPAuth: integration-test=busybox healthy within 8.024207138s
addons_test.go:630: (dbg) Run:  kubectl --context addons-20210812234043-679351 exec busybox -- /bin/sh -c "printenv GOOGLE_APPLICATION_CREDENTIALS"
addons_test.go:667: (dbg) Run:  kubectl --context addons-20210812234043-679351 exec busybox -- /bin/sh -c "printenv GOOGLE_CLOUD_PROJECT"
addons_test.go:683: (dbg) Run:  kubectl --context addons-20210812234043-679351 apply -f testdata/private-image.yaml
addons_test.go:690: (dbg) TestAddons/parallel/GCPAuth: waiting 8m0s for pods matching "integration-test=private-image" in namespace "default" ...

=== CONT  TestAddons/parallel/GCPAuth
helpers_test.go:343: "private-image-7ff9c8c74f-ndc96" [ba95e39f-0d50-4a43-a00e-626342c5a069] Pending / Ready:ContainersNotReady (containers with unready status: [private-image]) / ContainersReady:ContainersNotReady (containers with unready status: [private-image])

=== CONT  TestAddons/parallel/GCPAuth
helpers_test.go:343: "private-image-7ff9c8c74f-ndc96" [ba95e39f-0d50-4a43-a00e-626342c5a069] Running

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:690: (dbg) TestAddons/parallel/GCPAuth: integration-test=private-image healthy within 16.012304225s
addons_test.go:696: (dbg) Run:  kubectl --context addons-20210812234043-679351 apply -f testdata/private-image-eu.yaml
addons_test.go:703: (dbg) TestAddons/parallel/GCPAuth: waiting 8m0s for pods matching "integration-test=private-image-eu" in namespace "default" ...

=== CONT  TestAddons/parallel/GCPAuth
helpers_test.go:343: "private-image-eu-5956d58f9f-cqr6s" [2ba3ca44-24b5-4452-b954-2b698fa136a1] Pending / Ready:ContainersNotReady (containers with unready status: [private-image-eu]) / ContainersReady:ContainersNotReady (containers with unready status: [private-image-eu])

=== CONT  TestAddons/parallel/GCPAuth
helpers_test.go:343: "private-image-eu-5956d58f9f-cqr6s" [2ba3ca44-24b5-4452-b954-2b698fa136a1] Running

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:703: (dbg) TestAddons/parallel/GCPAuth: integration-test=private-image-eu healthy within 9.015950394s
addons_test.go:709: (dbg) Run:  out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable gcp-auth --alsologtostderr -v=1

=== CONT  TestAddons/parallel/GCPAuth
addons_test.go:709: (dbg) Done: out/minikube-linux-amd64 -p addons-20210812234043-679351 addons disable gcp-auth --alsologtostderr -v=1: (11.494334649s)
--- PASS: TestAddons/parallel/GCPAuth (46.00s)

TestCertOptions (80.03s)

=== RUN   TestCertOptions
=== PAUSE TestCertOptions

=== CONT  TestCertOptions
cert_options_test.go:47: (dbg) Run:  out/minikube-linux-amd64 start -p cert-options-20210813002211-679351 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=kvm2  --container-runtime=containerd

=== CONT  TestCertOptions
cert_options_test.go:47: (dbg) Done: out/minikube-linux-amd64 start -p cert-options-20210813002211-679351 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=kvm2  --container-runtime=containerd: (1m18.21527962s)
cert_options_test.go:58: (dbg) Run:  out/minikube-linux-amd64 -p cert-options-20210813002211-679351 ssh "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt"
cert_options_test.go:73: (dbg) Run:  kubectl --context cert-options-20210813002211-679351 config view
helpers_test.go:176: Cleaning up "cert-options-20210813002211-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p cert-options-20210813002211-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p cert-options-20210813002211-679351: (1.414726473s)
--- PASS: TestCertOptions (80.03s)
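
Note: the openssl probe checks that the extra --apiserver-ips and --apiserver-names values ended up as subject alternative names in the generated apiserver certificate. An equivalent check with crypto/x509, assuming the cert has first been copied off the node:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	data, err := os.ReadFile("apiserver.crt") // fetched from /var/lib/minikube/certs/
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	fmt.Println("DNS SANs:", cert.DNSNames) // expect localhost and www.google.com
	for _, ip := range cert.IPAddresses {
		fmt.Println("IP SAN:", ip) // expect 127.0.0.1 and 192.168.15.15
	}
}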

TestForceSystemdFlag (92.05s)

=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag

=== CONT  TestForceSystemdFlag
docker_test.go:85: (dbg) Run:  out/minikube-linux-amd64 start -p force-systemd-flag-20210813002108-679351 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=kvm2  --container-runtime=containerd
E0813 00:21:09.471898  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:21:53.178052  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory

=== CONT  TestForceSystemdFlag
docker_test.go:85: (dbg) Done: out/minikube-linux-amd64 start -p force-systemd-flag-20210813002108-679351 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=kvm2  --container-runtime=containerd: (1m30.379566315s)
docker_test.go:113: (dbg) Run:  out/minikube-linux-amd64 -p force-systemd-flag-20210813002108-679351 ssh "cat /etc/containerd/config.toml"
helpers_test.go:176: Cleaning up "force-systemd-flag-20210813002108-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p force-systemd-flag-20210813002108-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p force-systemd-flag-20210813002108-679351: (1.394687273s)
--- PASS: TestForceSystemdFlag (92.05s)
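
Note: the config.toml check verifies that --force-systemd switched containerd's runtime to the systemd cgroup driver. A minimal assertion in Go, assuming the file was first fetched with something like `minikube ssh "cat /etc/containerd/config.toml" > config.toml`; the key is spelled SystemdCgroup (runc options table) or systemd_cgroup (older CRI plugin config) depending on the containerd config version:

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	data, err := os.ReadFile("config.toml")
	if err != nil {
		panic(err)
	}
	s := string(data)
	if strings.Contains(s, "SystemdCgroup = true") || strings.Contains(s, "systemd_cgroup = true") {
		fmt.Println("containerd is using the systemd cgroup driver")
	} else {
		fmt.Println("systemd cgroup driver not enabled")
	}
}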

TestForceSystemdEnv (73.96s)

=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv

=== CONT  TestForceSystemdEnv
docker_test.go:136: (dbg) Run:  out/minikube-linux-amd64 start -p force-systemd-env-20210813001951-679351 --memory=2048 --alsologtostderr -v=5 --driver=kvm2  --container-runtime=containerd

=== CONT  TestForceSystemdEnv
docker_test.go:136: (dbg) Done: out/minikube-linux-amd64 start -p force-systemd-env-20210813001951-679351 --memory=2048 --alsologtostderr -v=5 --driver=kvm2  --container-runtime=containerd: (1m12.643131631s)
docker_test.go:113: (dbg) Run:  out/minikube-linux-amd64 -p force-systemd-env-20210813001951-679351 ssh "cat /etc/containerd/config.toml"
helpers_test.go:176: Cleaning up "force-systemd-env-20210813001951-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p force-systemd-env-20210813001951-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p force-systemd-env-20210813001951-679351: (1.088197074s)
--- PASS: TestForceSystemdEnv (73.96s)

TestKVMDriverInstallOrUpdate (2.42s)

=== RUN   TestKVMDriverInstallOrUpdate
=== PAUSE TestKVMDriverInstallOrUpdate

=== CONT  TestKVMDriverInstallOrUpdate
--- PASS: TestKVMDriverInstallOrUpdate (2.42s)

TestErrorSpam/setup (60.33s)

=== RUN   TestErrorSpam/setup
error_spam_test.go:78: (dbg) Run:  out/minikube-linux-amd64 start -p nospam-20210812234713-679351 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-20210812234713-679351 --driver=kvm2  --container-runtime=containerd
error_spam_test.go:78: (dbg) Done: out/minikube-linux-amd64 start -p nospam-20210812234713-679351 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-20210812234713-679351 --driver=kvm2  --container-runtime=containerd: (1m0.325560828s)
--- PASS: TestErrorSpam/setup (60.33s)

TestErrorSpam/start (0.43s)

=== RUN   TestErrorSpam/start
error_spam_test.go:213: Cleaning up 1 logfile(s) ...
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 start --dry-run
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 start --dry-run
error_spam_test.go:179: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 start --dry-run
--- PASS: TestErrorSpam/start (0.43s)

TestErrorSpam/status (0.74s)

=== RUN   TestErrorSpam/status
error_spam_test.go:213: Cleaning up 0 logfile(s) ...
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 status
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 status
error_spam_test.go:179: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 status
--- PASS: TestErrorSpam/status (0.74s)

TestErrorSpam/pause (3.68s)

=== RUN   TestErrorSpam/pause
error_spam_test.go:213: Cleaning up 0 logfile(s) ...
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 pause
error_spam_test.go:156: (dbg) Done: out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 pause: (2.710513904s)
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 pause
error_spam_test.go:179: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 pause
--- PASS: TestErrorSpam/pause (3.68s)

TestErrorSpam/unpause (1.58s)

=== RUN   TestErrorSpam/unpause
error_spam_test.go:213: Cleaning up 0 logfile(s) ...
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 unpause
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 unpause
error_spam_test.go:179: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 unpause
--- PASS: TestErrorSpam/unpause (1.58s)

TestErrorSpam/stop (6.26s)

=== RUN   TestErrorSpam/stop
error_spam_test.go:213: Cleaning up 0 logfile(s) ...
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 stop
error_spam_test.go:156: (dbg) Done: out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 stop: (6.107191082s)
error_spam_test.go:156: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 stop
error_spam_test.go:179: (dbg) Run:  out/minikube-linux-amd64 -p nospam-20210812234713-679351 --log_dir /tmp/nospam-20210812234713-679351 stop
--- PASS: TestErrorSpam/stop (6.26s)
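
Note: each TestErrorSpam subtest runs a subcommand a few times and fails if anything unexpected shows up in the logs. The heart of such a check, sketched with os/exec (profile and log_dir values mirror the log):

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	var stderr bytes.Buffer
	cmd := exec.Command("out/minikube-linux-amd64", "-p", "nospam-20210812234713-679351",
		"--log_dir", "/tmp/nospam-20210812234713-679351", "stop")
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		fmt.Println("command failed:", err)
	}
	for _, line := range strings.Split(strings.TrimSpace(stderr.String()), "\n") {
		if line != "" {
			fmt.Println("unexpected stderr:", line)
		}
	}
}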

TestFunctional/serial/CopySyncFile (0s)

=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1606: local sync path: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/files/etc/test/nested/copy/679351/hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.00s)

TestFunctional/serial/StartWithProxy (79.47s)

=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:1982: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --memory=4000 --apiserver-port=8441 --wait=all --driver=kvm2  --container-runtime=containerd
E0812 23:48:49.240897  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.246926  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.257180  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.277437  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.317875  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.398256  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.558676  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:49.879355  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:50.520329  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:51.801173  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:54.362418  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:48:59.483342  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:49:10.613129  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:49:31.093378  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
functional_test.go:1982: (dbg) Done: out/minikube-linux-amd64 start -p functional-20210812234826-679351 --memory=4000 --apiserver-port=8441 --wait=all --driver=kvm2  --container-runtime=containerd: (1m19.464818319s)
--- PASS: TestFunctional/serial/StartWithProxy (79.47s)

TestFunctional/serial/AuditLog (0s)

=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

TestFunctional/serial/SoftStart (29.08s)

=== RUN   TestFunctional/serial/SoftStart
functional_test.go:627: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --alsologtostderr -v=8
E0812 23:50:12.054353  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
functional_test.go:627: (dbg) Done: out/minikube-linux-amd64 start -p functional-20210812234826-679351 --alsologtostderr -v=8: (29.079173444s)
functional_test.go:631: soft start took 29.079818217s for "functional-20210812234826-679351" cluster.
--- PASS: TestFunctional/serial/SoftStart (29.08s)

TestFunctional/serial/KubeContext (0.05s)

=== RUN   TestFunctional/serial/KubeContext
functional_test.go:647: (dbg) Run:  kubectl config current-context
--- PASS: TestFunctional/serial/KubeContext (0.05s)

TestFunctional/serial/KubectlGetPods (0.21s)

=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:660: (dbg) Run:  kubectl --context functional-20210812234826-679351 get po -A
--- PASS: TestFunctional/serial/KubectlGetPods (0.21s)

TestFunctional/serial/CacheCmd/cache/add_remote (8.11s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:982: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add k8s.gcr.io/pause:3.1
functional_test.go:982: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add k8s.gcr.io/pause:3.3
functional_test.go:982: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add k8s.gcr.io/pause:3.3: (4.118144495s)
functional_test.go:982: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add k8s.gcr.io/pause:latest
functional_test.go:982: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add k8s.gcr.io/pause:latest: (3.001008749s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (8.11s)

TestFunctional/serial/CacheCmd/cache/add_local (1.77s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1012: (dbg) Run:  docker build -t minikube-local-cache-test:functional-20210812234826-679351 /tmp/functional-20210812234826-679351957620759
functional_test.go:1024: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add minikube-local-cache-test:functional-20210812234826-679351
functional_test.go:1024: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 cache add minikube-local-cache-test:functional-20210812234826-679351: (1.48520319s)
functional_test.go:1029: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache delete minikube-local-cache-test:functional-20210812234826-679351
functional_test.go:1018: (dbg) Run:  docker rmi minikube-local-cache-test:functional-20210812234826-679351
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (1.77s)

TestFunctional/serial/CacheCmd/cache/delete_k8s.gcr.io/pause:3.3 (0.06s)

=== RUN   TestFunctional/serial/CacheCmd/cache/delete_k8s.gcr.io/pause:3.3
functional_test.go:1036: (dbg) Run:  out/minikube-linux-amd64 cache delete k8s.gcr.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/delete_k8s.gcr.io/pause:3.3 (0.06s)

TestFunctional/serial/CacheCmd/cache/list (0.05s)

=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1043: (dbg) Run:  out/minikube-linux-amd64 cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.05s)

TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.23s)

=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1056: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh sudo crictl images
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.23s)

TestFunctional/serial/CacheCmd/cache/cache_reload (2.52s)

=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1078: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh sudo crictl rmi k8s.gcr.io/pause:latest
functional_test.go:1084: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh sudo crictl inspecti k8s.gcr.io/pause:latest
functional_test.go:1084: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh sudo crictl inspecti k8s.gcr.io/pause:latest: exit status 1 (249.259597ms)

-- stdout --
	FATA[0000] no such image "k8s.gcr.io/pause:latest" present 

-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test.go:1089: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cache reload
functional_test.go:1089: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 cache reload: (1.780724649s)
functional_test.go:1094: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh sudo crictl inspecti k8s.gcr.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (2.52s)

TestFunctional/serial/CacheCmd/cache/delete (0.11s)

=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1103: (dbg) Run:  out/minikube-linux-amd64 cache delete k8s.gcr.io/pause:3.1
functional_test.go:1103: (dbg) Run:  out/minikube-linux-amd64 cache delete k8s.gcr.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.11s)

TestFunctional/serial/MinikubeKubectlCmd (0.12s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:678: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 kubectl -- --context functional-20210812234826-679351 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.12s)

TestFunctional/serial/MinikubeKubectlCmdDirectly (0.11s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:701: (dbg) Run:  out/kubectl --context functional-20210812234826-679351 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (0.11s)

TestFunctional/serial/ExtraConfig (37.57s)

=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:715: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
functional_test.go:715: (dbg) Done: out/minikube-linux-amd64 start -p functional-20210812234826-679351 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: (37.571930796s)
functional_test.go:719: restart took 37.5720301s for "functional-20210812234826-679351" cluster.
--- PASS: TestFunctional/serial/ExtraConfig (37.57s)

TestFunctional/serial/ComponentHealth (0.07s)

=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:766: (dbg) Run:  kubectl --context functional-20210812234826-679351 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:780: etcd phase: Running
functional_test.go:790: etcd status: Ready
functional_test.go:780: kube-apiserver phase: Running
functional_test.go:790: kube-apiserver status: Ready
functional_test.go:780: kube-controller-manager phase: Running
functional_test.go:790: kube-controller-manager status: Ready
functional_test.go:780: kube-scheduler phase: Running
functional_test.go:790: kube-scheduler status: Ready
--- PASS: TestFunctional/serial/ComponentHealth (0.07s)

TestFunctional/serial/LogsCmd (1.41s)

=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1165: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 logs
functional_test.go:1165: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 logs: (1.405120711s)
--- PASS: TestFunctional/serial/LogsCmd (1.41s)

TestFunctional/serial/LogsFileCmd (1.37s)

=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1181: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 logs --file /tmp/functional-20210812234826-679351919649418/logs.txt
functional_test.go:1181: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 logs --file /tmp/functional-20210812234826-679351919649418/logs.txt: (1.364830197s)
--- PASS: TestFunctional/serial/LogsFileCmd (1.37s)

TestFunctional/parallel/ConfigCmd (0.39s)

=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config unset cpus
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config get cpus
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1129: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 config get cpus: exit status 14 (79.765534ms)
** stderr ** 
	Error: specified key could not be found in config
** /stderr **
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config set cpus 2
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config get cpus
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config unset cpus
functional_test.go:1129: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 config get cpus
functional_test.go:1129: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 config get cpus: exit status 14 (52.366212ms)
** stderr ** 
	Error: specified key could not be found in config
** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (0.39s)

TestFunctional/parallel/DashboardCmd (4.32s)

=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd
=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:857: (dbg) daemon: [out/minikube-linux-amd64 dashboard --url --port 36195 -p functional-20210812234826-679351 --alsologtostderr -v=1]
=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:862: (dbg) stopping [out/minikube-linux-amd64 dashboard --url --port 36195 -p functional-20210812234826-679351 --alsologtostderr -v=1] ...
helpers_test.go:507: unable to kill pid 684548: os: process already finished
--- PASS: TestFunctional/parallel/DashboardCmd (4.32s)

TestFunctional/parallel/DryRun (0.34s)

=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun
=== CONT  TestFunctional/parallel/DryRun
functional_test.go:919: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --dry-run --memory 250MB --alsologtostderr --driver=kvm2  --container-runtime=containerd
functional_test.go:919: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p functional-20210812234826-679351 --dry-run --memory 250MB --alsologtostderr --driver=kvm2  --container-runtime=containerd: exit status 23 (173.748157ms)
-- stdout --
	* [functional-20210812234826-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	* Using the kvm2 driver based on existing profile
	
	
-- /stdout --
** stderr ** 
	I0812 23:51:35.649030  684210 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:51:35.649106  684210 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:51:35.649109  684210 out.go:311] Setting ErrFile to fd 2...
	I0812 23:51:35.649113  684210 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:51:35.649226  684210 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0812 23:51:35.649455  684210 out.go:305] Setting JSON to false
	I0812 23:51:35.688479  684210 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":12859,"bootTime":1628799437,"procs":185,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0812 23:51:35.688602  684210 start.go:121] virtualization: kvm guest
	I0812 23:51:35.695122  684210 out.go:177] * [functional-20210812234826-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	I0812 23:51:35.696949  684210 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0812 23:51:35.698325  684210 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0812 23:51:35.699858  684210 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0812 23:51:35.701192  684210 out.go:177]   - MINIKUBE_LOCATION=12230
	I0812 23:51:35.702182  684210 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:51:35.702242  684210 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:51:35.714123  684210 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:38873
	I0812 23:51:35.714634  684210 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:51:35.715392  684210 main.go:130] libmachine: Using API Version  1
	I0812 23:51:35.715417  684210 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:51:35.715860  684210 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:51:35.716065  684210 main.go:130] libmachine: (functional-20210812234826-679351) Calling .DriverName
	I0812 23:51:35.716289  684210 driver.go:335] Setting default libvirt URI to qemu:///system
	I0812 23:51:35.716713  684210 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:51:35.716757  684210 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:51:35.727966  684210 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:45965
	I0812 23:51:35.728387  684210 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:51:35.728880  684210 main.go:130] libmachine: Using API Version  1
	I0812 23:51:35.728905  684210 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:51:35.729248  684210 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:51:35.729435  684210 main.go:130] libmachine: (functional-20210812234826-679351) Calling .DriverName
	I0812 23:51:35.761055  684210 out.go:177] * Using the kvm2 driver based on existing profile
	I0812 23:51:35.761096  684210 start.go:278] selected driver: kvm2
	I0812 23:51:35.761105  684210 start.go:751] validating driver "kvm2" against &{Name:functional-20210812234826-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:4000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:functional-20210812234826-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8441 NodeName:} Nodes:[{Name: IP:192.168.50.102 Port:8441 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false helm-tiller:false ingress:false ingress-dns:false istio:false istio-provisioner:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false volumesnapshots:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:51:35.761335  684210 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0812 23:51:35.763965  684210 out.go:177] 
	W0812 23:51:35.764171  684210 out.go:242] X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	I0812 23:51:35.765496  684210 out.go:177] 
** /stderr **
functional_test.go:934: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --dry-run --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd
--- PASS: TestFunctional/parallel/DryRun (0.34s)

TestFunctional/parallel/InternationalLanguage (0.18s)

=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage
=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:956: (dbg) Run:  out/minikube-linux-amd64 start -p functional-20210812234826-679351 --dry-run --memory 250MB --alsologtostderr --driver=kvm2  --container-runtime=containerd
=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:956: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p functional-20210812234826-679351 --dry-run --memory 250MB --alsologtostderr --driver=kvm2  --container-runtime=containerd: exit status 23 (176.346983ms)
-- stdout --
	* [functional-20210812234826-679351] minikube v1.22.0 sur Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	* Utilisation du pilote kvm2 basé sur le profil existant
	
	
-- /stdout --
** stderr ** 
	I0812 23:51:36.002302  684273 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:51:36.002523  684273 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:51:36.002536  684273 out.go:311] Setting ErrFile to fd 2...
	I0812 23:51:36.002541  684273 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:51:36.002759  684273 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0812 23:51:36.003070  684273 out.go:305] Setting JSON to false
	I0812 23:51:36.040878  684273 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":12859,"bootTime":1628799437,"procs":190,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0812 23:51:36.041028  684273 start.go:121] virtualization: kvm guest
	I0812 23:51:36.043261  684273 out.go:177] * [functional-20210812234826-679351] minikube v1.22.0 sur Debian 9.13 (kvm/amd64)
	I0812 23:51:36.044745  684273 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0812 23:51:36.046255  684273 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0812 23:51:36.047803  684273 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0812 23:51:36.049104  684273 out.go:177]   - MINIKUBE_LOCATION=12230
	I0812 23:51:36.049984  684273 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:51:36.050067  684273 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:51:36.062245  684273 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:37259
	I0812 23:51:36.062684  684273 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:51:36.063274  684273 main.go:130] libmachine: Using API Version  1
	I0812 23:51:36.063297  684273 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:51:36.063738  684273 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:51:36.063941  684273 main.go:130] libmachine: (functional-20210812234826-679351) Calling .DriverName
	I0812 23:51:36.064146  684273 driver.go:335] Setting default libvirt URI to qemu:///system
	I0812 23:51:36.064616  684273 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:51:36.064660  684273 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:51:36.075567  684273 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:37395
	I0812 23:51:36.075977  684273 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:51:36.076416  684273 main.go:130] libmachine: Using API Version  1
	I0812 23:51:36.076438  684273 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:51:36.076767  684273 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:51:36.076947  684273 main.go:130] libmachine: (functional-20210812234826-679351) Calling .DriverName
	I0812 23:51:36.108488  684273 out.go:177] * Utilisation du pilote kvm2 basé sur le profil existant
	I0812 23:51:36.108528  684273 start.go:278] selected driver: kvm2
	I0812 23:51:36.108537  684273 start.go:751] validating driver "kvm2" against &{Name:functional-20210812234826-679351 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube-builds/iso/12122/minikube-v1.22.0-1628238775-12122.iso KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.25@sha256:6f936e3443b95cd918d77623bf7b595653bb382766e280290a02b4a349e88b79 Memory:4000 CPUs:2 DiskSize:20000 VMDriver: Driver:kvm2 HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.99.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.21.3 ClusterName:functional-20210812234826-679351 Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8441 NodeName:} Nodes:[{Name: IP:192.168.50.102 Port:8441 KubernetesVersion:v1.21.3 ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false helm-tiller:false ingress:false ingress-dns:false istio:false istio-provisioner:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false volumesnapshots:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: MultiNodeRequested:false ExtraDisks:0}
	I0812 23:51:36.108757  684273 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0812 23:51:36.111652  684273 out.go:177] 
	W0812 23:51:36.111785  684273 out.go:242] X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	I0812 23:51:36.113220  684273 out.go:177] 
** /stderr **
--- PASS: TestFunctional/parallel/InternationalLanguage (0.18s)

TestFunctional/parallel/StatusCmd (0.93s)

=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd
=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:809: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 status
functional_test.go:815: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:826: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 status -o json
--- PASS: TestFunctional/parallel/StatusCmd (0.93s)

TestFunctional/parallel/ServiceCmd (15.72s)

=== RUN   TestFunctional/parallel/ServiceCmd
=== PAUSE TestFunctional/parallel/ServiceCmd
=== CONT  TestFunctional/parallel/ServiceCmd
functional_test.go:1357: (dbg) Run:  kubectl --context functional-20210812234826-679351 create deployment hello-node --image=k8s.gcr.io/echoserver:1.8
=== CONT  TestFunctional/parallel/ServiceCmd
functional_test.go:1363: (dbg) Run:  kubectl --context functional-20210812234826-679351 expose deployment hello-node --type=NodePort --port=8080
=== CONT  TestFunctional/parallel/ServiceCmd
functional_test.go:1368: (dbg) TestFunctional/parallel/ServiceCmd: waiting 10m0s for pods matching "app=hello-node" in namespace "default" ...
=== CONT  TestFunctional/parallel/ServiceCmd
helpers_test.go:343: "hello-node-6cbfcd7cbc-lw7mk" [3e83ac27-2552-4d0f-a295-c238a1d40105] Pending / Ready:ContainersNotReady (containers with unready status: [echoserver]) / ContainersReady:ContainersNotReady (containers with unready status: [echoserver])
=== CONT  TestFunctional/parallel/ServiceCmd
helpers_test.go:343: "hello-node-6cbfcd7cbc-lw7mk" [3e83ac27-2552-4d0f-a295-c238a1d40105] Running
=== CONT  TestFunctional/parallel/ServiceCmd
functional_test.go:1368: (dbg) TestFunctional/parallel/ServiceCmd: app=hello-node healthy within 13.865350443s
functional_test.go:1372: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 service list
functional_test.go:1385: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 service --namespace=default --https --url hello-node
functional_test.go:1394: found endpoint: https://192.168.50.102:31056
functional_test.go:1405: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 service hello-node --url --format={{.IP}}
functional_test.go:1414: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 service hello-node --url
functional_test.go:1420: found endpoint for hello-node: http://192.168.50.102:31056
functional_test.go:1431: Attempting to fetch http://192.168.50.102:31056 ...
functional_test.go:1450: http://192.168.50.102:31056: success! body:
Hostname: hello-node-6cbfcd7cbc-lw7mk
Pod Information:
	-no pod information available-
Server values:
	server_version=nginx: 1.13.3 - lua: 10008
Request Information:
	client_address=10.244.0.1
	method=GET
	real path=/
	query=
	request_version=1.1
	request_uri=http://192.168.50.102:8080/
Request Headers:
	accept-encoding=gzip
	host=192.168.50.102:31056
	user-agent=Go-http-client/1.1
Request Body:
	-no body in request-
--- PASS: TestFunctional/parallel/ServiceCmd (15.72s)

TestFunctional/parallel/AddonsCmd (0.18s)

=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd
=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1465: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 addons list
=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1476: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.18s)

TestFunctional/parallel/PersistentVolumeClaim (48.29s)

=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:343: "storage-provisioner" [d964c6ad-f145-4dee-a728-845a0e68e743] Running
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: integration-test=storage-provisioner healthy within 5.009805734s
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-20210812234826-679351 get storageclass -o=json
functional_test_pvc_test.go:69: (dbg) Run:  kubectl --context functional-20210812234826-679351 apply -f testdata/storage-provisioner/pvc.yaml
functional_test_pvc_test.go:76: (dbg) Run:  kubectl --context functional-20210812234826-679351 get pvc myclaim -o=json
functional_test_pvc_test.go:76: (dbg) Run:  kubectl --context functional-20210812234826-679351 get pvc myclaim -o=json
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-20210812234826-679351 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:343: "sp-pod" [5512464c-b5e5-4d50-b832-191bac287e03] Pending
helpers_test.go:343: "sp-pod" [5512464c-b5e5-4d50-b832-191bac287e03] Pending / Ready:ContainersNotReady (containers with unready status: [myfrontend]) / ContainersReady:ContainersNotReady (containers with unready status: [myfrontend])
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
helpers_test.go:343: "sp-pod" [5512464c-b5e5-4d50-b832-191bac287e03] Running
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 18.050166294s
functional_test_pvc_test.go:100: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec sp-pod -- touch /tmp/mount/foo
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:106: (dbg) Run:  kubectl --context functional-20210812234826-679351 delete -f testdata/storage-provisioner/pod.yaml
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:106: (dbg) Done: kubectl --context functional-20210812234826-679351 delete -f testdata/storage-provisioner/pod.yaml: (14.373044007s)
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-20210812234826-679351 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:343: "sp-pod" [aee5f6e4-df46-48dc-93a1-db9e50027ce9] Pending
helpers_test.go:343: "sp-pod" [aee5f6e4-df46-48dc-93a1-db9e50027ce9] Pending / Ready:ContainersNotReady (containers with unready status: [myfrontend]) / ContainersReady:ContainersNotReady (containers with unready status: [myfrontend])
helpers_test.go:343: "sp-pod" [aee5f6e4-df46-48dc-93a1-db9e50027ce9] Running
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 8.01233634s
functional_test_pvc_test.go:114: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec sp-pod -- ls /tmp/mount
--- PASS: TestFunctional/parallel/PersistentVolumeClaim (48.29s)

TestFunctional/parallel/SSHCmd (0.52s)

=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd
=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1498: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "echo hello"
=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1515: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "cat /etc/hostname"
--- PASS: TestFunctional/parallel/SSHCmd (0.52s)

TestFunctional/parallel/CpCmd (0.51s)

=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd
=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:535: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 cp testdata/cp-test.txt /home/docker/cp-test.txt
=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:549: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /home/docker/cp-test.txt"
--- PASS: TestFunctional/parallel/CpCmd (0.51s)

TestFunctional/parallel/MySQL (25.9s)

=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1546: (dbg) Run:  kubectl --context functional-20210812234826-679351 replace --force -f testdata/mysql.yaml
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1551: (dbg) TestFunctional/parallel/MySQL: waiting 10m0s for pods matching "app=mysql" in namespace "default" ...
=== CONT  TestFunctional/parallel/MySQL
helpers_test.go:343: "mysql-9bbbc5bbb-z8zth" [05e9cc59-b093-4c1c-8a21-b1bb678761cf] Pending / Ready:ContainersNotReady (containers with unready status: [mysql]) / ContainersReady:ContainersNotReady (containers with unready status: [mysql])
=== CONT  TestFunctional/parallel/MySQL
helpers_test.go:343: "mysql-9bbbc5bbb-z8zth" [05e9cc59-b093-4c1c-8a21-b1bb678761cf] Running
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1551: (dbg) TestFunctional/parallel/MySQL: app=mysql healthy within 20.014369712s
functional_test.go:1558: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;"
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1558: (dbg) Non-zero exit: kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;": exit status 1 (356.719826ms)
** stderr ** 
	mysql: [Warning] Using a password on the command line interface can be insecure.
	ERROR 1045 (28000): Access denied for user 'root'@'localhost' (using password: YES)
	command terminated with exit code 1
** /stderr **
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1558: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;"
functional_test.go:1558: (dbg) Non-zero exit: kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;": exit status 1 (405.856479ms)
** stderr ** 
	mysql: [Warning] Using a password on the command line interface can be insecure.
	ERROR 1045 (28000): Access denied for user 'root'@'localhost' (using password: YES)
	command terminated with exit code 1
** /stderr **
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1558: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;"
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1558: (dbg) Non-zero exit: kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;": exit status 1 (282.670537ms)
** stderr ** 
	mysql: [Warning] Using a password on the command line interface can be insecure.
	ERROR 2002 (HY000): Can't connect to local MySQL server through socket '/var/run/mysqld/mysqld.sock' (2)
	command terminated with exit code 1
** /stderr **
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1558: (dbg) Run:  kubectl --context functional-20210812234826-679351 exec mysql-9bbbc5bbb-z8zth -- mysql -ppassword -e "show databases;"
--- PASS: TestFunctional/parallel/MySQL (25.90s)

TestFunctional/parallel/FileSync (0.26s)

=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync
=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1678: Checking for existence of /etc/test/nested/copy/679351/hosts within VM
=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1679: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /etc/test/nested/copy/679351/hosts"
=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1684: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (0.26s)

TestFunctional/parallel/CertSync (1.77s)

=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync
=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1719: Checking for existence of /etc/ssl/certs/679351.pem within VM
functional_test.go:1720: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /etc/ssl/certs/679351.pem"
=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1719: Checking for existence of /usr/share/ca-certificates/679351.pem within VM
functional_test.go:1720: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /usr/share/ca-certificates/679351.pem"
=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1719: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1720: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:1746: Checking for existence of /etc/ssl/certs/6793512.pem within VM
functional_test.go:1747: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /etc/ssl/certs/6793512.pem"
functional_test.go:1746: Checking for existence of /usr/share/ca-certificates/6793512.pem within VM
functional_test.go:1747: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /usr/share/ca-certificates/6793512.pem"
functional_test.go:1746: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:1747: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
--- PASS: TestFunctional/parallel/CertSync (1.77s)

TestFunctional/parallel/NodeLabels (0.07s)

=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels
=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:216: (dbg) Run:  kubectl --context functional-20210812234826-679351 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
--- PASS: TestFunctional/parallel/NodeLabels (0.07s)

TestFunctional/parallel/LoadImage (3.07s)

=== RUN   TestFunctional/parallel/LoadImage
=== PAUSE TestFunctional/parallel/LoadImage
=== CONT  TestFunctional/parallel/LoadImage
functional_test.go:239: (dbg) Run:  docker pull busybox:1.33
=== CONT  TestFunctional/parallel/LoadImage
functional_test.go:246: (dbg) Run:  docker tag busybox:1.33 docker.io/library/busybox:load-functional-20210812234826-679351
functional_test.go:252: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image load docker.io/library/busybox:load-functional-20210812234826-679351
=== CONT  TestFunctional/parallel/LoadImage
functional_test.go:252: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 image load docker.io/library/busybox:load-functional-20210812234826-679351: (2.257591918s)
functional_test.go:373: (dbg) Run:  out/minikube-linux-amd64 ssh -p functional-20210812234826-679351 -- sudo crictl inspecti docker.io/library/busybox:load-functional-20210812234826-679351
--- PASS: TestFunctional/parallel/LoadImage (3.07s)

TestFunctional/parallel/RemoveImage (3.5s)

=== RUN   TestFunctional/parallel/RemoveImage
=== PAUSE TestFunctional/parallel/RemoveImage
=== CONT  TestFunctional/parallel/RemoveImage
functional_test.go:331: (dbg) Run:  docker pull busybox:1.32
=== CONT  TestFunctional/parallel/RemoveImage
functional_test.go:338: (dbg) Run:  docker tag busybox:1.32 docker.io/library/busybox:remove-functional-20210812234826-679351
functional_test.go:344: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image load docker.io/library/busybox:remove-functional-20210812234826-679351
=== CONT  TestFunctional/parallel/RemoveImage
functional_test.go:344: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 image load docker.io/library/busybox:remove-functional-20210812234826-679351: (2.278098734s)
functional_test.go:350: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image rm docker.io/library/busybox:remove-functional-20210812234826-679351
=== CONT  TestFunctional/parallel/RemoveImage
functional_test.go:387: (dbg) Run:  out/minikube-linux-amd64 ssh -p functional-20210812234826-679351 -- sudo crictl images
--- PASS: TestFunctional/parallel/RemoveImage (3.50s)

TestFunctional/parallel/LoadImageFromFile (1.83s)

=== RUN   TestFunctional/parallel/LoadImageFromFile
=== PAUSE TestFunctional/parallel/LoadImageFromFile
=== CONT  TestFunctional/parallel/LoadImageFromFile
functional_test.go:279: (dbg) Run:  docker pull busybox:1.31
E0812 23:51:33.975504  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
=== CONT  TestFunctional/parallel/LoadImageFromFile
functional_test.go:286: (dbg) Run:  docker tag busybox:1.31 docker.io/library/busybox:load-from-file-functional-20210812234826-679351
functional_test.go:293: (dbg) Run:  docker save -o busybox.tar docker.io/library/busybox:load-from-file-functional-20210812234826-679351
functional_test.go:304: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image load /home/jenkins/workspace/KVM_Linux_containerd_integration/busybox.tar
=== CONT  TestFunctional/parallel/LoadImageFromFile
functional_test.go:387: (dbg) Run:  out/minikube-linux-amd64 ssh -p functional-20210812234826-679351 -- sudo crictl images
--- PASS: TestFunctional/parallel/LoadImageFromFile (1.83s)

TestFunctional/parallel/BuildImage (4.29s)

=== RUN   TestFunctional/parallel/BuildImage
=== PAUSE TestFunctional/parallel/BuildImage
=== CONT  TestFunctional/parallel/BuildImage
functional_test.go:407: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image build -t localhost/my-image:functional-20210812234826-679351 testdata/build
=== CONT  TestFunctional/parallel/BuildImage
functional_test.go:407: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 image build -t localhost/my-image:functional-20210812234826-679351 testdata/build: (4.05143243s)
functional_test.go:415: (dbg) Stderr: out/minikube-linux-amd64 -p functional-20210812234826-679351 image build -t localhost/my-image:functional-20210812234826-679351 testdata/build:
#1 [internal] load build definition from Dockerfile
#1 sha256:f02800fb2075095447668f04cfee48f3c70e28ca3c78c2dbd7a72b7b561ddae2
#1 transferring dockerfile: 77B done
#1 DONE 0.1s
#2 [internal] load .dockerignore
#2 sha256:7aff002112e0d03013cc505cfc1c7ebfdc3df8bc58431193eb82430e657875f8
#2 transferring context: 2B done
#2 DONE 0.1s
#3 [internal] load metadata for docker.io/library/busybox:latest
#3 sha256:da853382a7535e068feae4d80bdd0ad2567df3d5cd484fd68f919294d091b053
#3 DONE 0.5s
#6 [internal] load build context
#6 sha256:828a3a27338339bc21d057da5df54fcf5f0b3bf3030bcd8a66187f624d2afe9f
#6 transferring context: 62B done
#6 DONE 0.1s
#4 [1/3] FROM docker.io/library/busybox@sha256:0f354ec1728d9ff32edcd7d1b8bbdfc798277ad36120dc3dc683be44524c8b60
#4 sha256:ded7865542ebebf70d0aaf67ad943b500cec9f312d89a6193a225a6ec323f554
#4 resolve docker.io/library/busybox@sha256:0f354ec1728d9ff32edcd7d1b8bbdfc798277ad36120dc3dc683be44524c8b60 0.1s done
#4 DONE 0.1s
#5 [2/3] RUN true
#5 sha256:2a43a89554c5162e0102a64ecc9da0b10cc751afa63890115ad44d55d244fc76
#5 DONE 0.9s

                                                
                                                
#7 [3/3] ADD content.txt /
#7 sha256:ffaa79342a4f39c6beea3650e047556694a379b054312e9e81cc22621034d853
#7 DONE 0.1s

                                                
                                                
#8 exporting to image
#8 sha256:e8c613e07b0b7ff33893b694f7759a10d42e180f2b4dc349fb57dc6b71dcab00
#8 exporting layers
#8 exporting layers 0.4s done
#8 exporting manifest sha256:dd8b8abb833b0ce3a728ac7e6723ea92194e784d34f609258329099261bfdaef
#8 exporting manifest sha256:dd8b8abb833b0ce3a728ac7e6723ea92194e784d34f609258329099261bfdaef 0.0s done
#8 exporting config sha256:a2dae8f6d86eecf59dcd8b026ab4497d3bbf75f17f038efce820d9f2348a5430 0.0s done
#8 naming to localhost/my-image:functional-20210812234826-679351 done
#8 DONE 0.5s
functional_test.go:373: (dbg) Run:  out/minikube-linux-amd64 ssh -p functional-20210812234826-679351 -- sudo crictl inspecti localhost/my-image:functional-20210812234826-679351
--- PASS: TestFunctional/parallel/BuildImage (4.29s)
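
For context on what was built: judging from the BuildKit vertices above ([1/3] FROM docker.io/library/busybox, [2/3] RUN true, [3/3] ADD content.txt /), the testdata/build Dockerfile presumably consists of just those three instructions, with content.txt supplying the 62B build context transferred in step #6. That reading is inferred from this log, not quoted from the repository.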

TestFunctional/parallel/ListImages (0.26s)
=== RUN   TestFunctional/parallel/ListImages
=== PAUSE TestFunctional/parallel/ListImages
=== CONT  TestFunctional/parallel/ListImages
functional_test.go:441: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 image ls
functional_test.go:446: (dbg) Stdout: out/minikube-linux-amd64 -p functional-20210812234826-679351 image ls:
k8s.gcr.io/pause:latest
k8s.gcr.io/pause:3.4.1
k8s.gcr.io/pause:3.3
k8s.gcr.io/pause:3.1
k8s.gcr.io/kube-scheduler:v1.21.3
k8s.gcr.io/kube-proxy:v1.21.3
k8s.gcr.io/kube-controller-manager:v1.21.3
k8s.gcr.io/kube-apiserver:v1.21.3
k8s.gcr.io/etcd:3.4.13-0
k8s.gcr.io/coredns/coredns:v1.8.0
gcr.io/k8s-minikube/storage-provisioner:v5
docker.io/library/minikube-local-cache-test:functional-20210812234826-679351
docker.io/kubernetesui/metrics-scraper:v1.0.4
docker.io/kubernetesui/dashboard:v2.1.0
docker.io/kindest/kindnetd:v20210326-1e038dc5
--- PASS: TestFunctional/parallel/ListImages (0.26s)

TestFunctional/parallel/NonActiveRuntimeDisabled (0.5s)
=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled
=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:1774: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo systemctl is-active docker"
functional_test.go:1774: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo systemctl is-active docker": exit status 1 (248.378405ms)
-- stdout --
	inactive
-- /stdout --
** stderr **
	ssh: Process exited with status 3
** /stderr **
functional_test.go:1774: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo systemctl is-active crio"
=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:1774: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo systemctl is-active crio": exit status 1 (249.881638ms)
-- stdout --
	inactive
-- /stdout --
** stderr **
	ssh: Process exited with status 3
** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (0.50s)
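
The two non-zero exits above are the expected outcome: systemctl is-active exits 0 only when the queried unit is running, and the status 3 surfaced through ssh corresponds to "inactive", so the test passes precisely because docker and crio are disabled while containerd is the active runtime.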

TestFunctional/parallel/UpdateContextCmd/no_changes (0.11s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:1865: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (0.11s)

TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.1s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:1865: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.10s)

TestFunctional/parallel/UpdateContextCmd/no_clusters (0.1s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:1865: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (0.10s)

TestFunctional/parallel/Version/short (0.06s)
=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short
=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2003: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 version --short
--- PASS: TestFunctional/parallel/Version/short (0.06s)

TestFunctional/parallel/Version/components (1.18s)
=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components
=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2016: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 version -o=json --components
=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2016: (dbg) Done: out/minikube-linux-amd64 -p functional-20210812234826-679351 version -o=json --components: (1.1783261s)
2021/08/12 23:51:40 [DEBUG] GET http://127.0.0.1:36195/api/v1/namespaces/kubernetes-dashboard/services/http:kubernetes-dashboard:/proxy/
--- PASS: TestFunctional/parallel/Version/components (1.18s)

TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.01s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:126: (dbg) daemon: [out/minikube-linux-amd64 -p functional-20210812234826-679351 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.01s)

TestFunctional/parallel/ProfileCmd/profile_not_create (0.42s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1202: (dbg) Run:  out/minikube-linux-amd64 profile lis
functional_test.go:1206: (dbg) Run:  out/minikube-linux-amd64 profile list --output json
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (0.42s)

TestFunctional/parallel/ProfileCmd/profile_list (0.33s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1240: (dbg) Run:  out/minikube-linux-amd64 profile list
functional_test.go:1245: Took "275.160707ms" to run "out/minikube-linux-amd64 profile list"
functional_test.go:1254: (dbg) Run:  out/minikube-linux-amd64 profile list -l
functional_test.go:1259: Took "55.996586ms" to run "out/minikube-linux-amd64 profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (0.33s)

TestFunctional/parallel/ProfileCmd/profile_json_output (0.33s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1290: (dbg) Run:  out/minikube-linux-amd64 profile list -o json
=== CONT  TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1295: Took "265.897816ms" to run "out/minikube-linux-amd64 profile list -o json"
functional_test.go:1303: (dbg) Run:  out/minikube-linux-amd64 profile list -o json --light
functional_test.go:1308: Took "60.132993ms" to run "out/minikube-linux-amd64 profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (0.33s)

TestFunctional/parallel/MountCmd/any-port (6.02s)
=== RUN   TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:76: (dbg) daemon: [out/minikube-linux-amd64 mount -p functional-20210812234826-679351 /tmp/mounttest786033249:/mount-9p --alsologtostderr -v=1]
functional_test_mount_test.go:110: wrote "test-1628812291258063683" to /tmp/mounttest786033249/created-by-test
functional_test_mount_test.go:110: wrote "test-1628812291258063683" to /tmp/mounttest786033249/created-by-test-removed-by-pod
functional_test_mount_test.go:110: wrote "test-1628812291258063683" to /tmp/mounttest786033249/test-1628812291258063683
functional_test_mount_test.go:118: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:118: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (226.729503ms)
** stderr **
	ssh: Process exited with status 1
** /stderr **
=== CONT  TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:118: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:132: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh -- ls -la /mount-9p
functional_test_mount_test.go:136: guest mount directory contents
total 2
-rw-r--r-- 1 docker docker 24 Aug 12 23:51 created-by-test
-rw-r--r-- 1 docker docker 24 Aug 12 23:51 created-by-test-removed-by-pod
-rw-r--r-- 1 docker docker 24 Aug 12 23:51 test-1628812291258063683
functional_test_mount_test.go:140: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh cat /mount-9p/test-1628812291258063683
=== CONT  TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:151: (dbg) Run:  kubectl --context functional-20210812234826-679351 replace --force -f testdata/busybox-mount-test.yaml
functional_test_mount_test.go:156: (dbg) TestFunctional/parallel/MountCmd/any-port: waiting 4m0s for pods matching "integration-test=busybox-mount" in namespace "default" ...
helpers_test.go:343: "busybox-mount" [f8878eab-e2ce-4448-97ad-cd1e0d71f2d1] Pending
=== CONT  TestFunctional/parallel/MountCmd/any-port
helpers_test.go:343: "busybox-mount" [f8878eab-e2ce-4448-97ad-cd1e0d71f2d1] Pending / Ready:ContainersNotReady (containers with unready status: [mount-munger]) / ContainersReady:ContainersNotReady (containers with unready status: [mount-munger])
=== CONT  TestFunctional/parallel/MountCmd/any-port
helpers_test.go:343: "busybox-mount" [f8878eab-e2ce-4448-97ad-cd1e0d71f2d1] Succeeded: Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
functional_test_mount_test.go:156: (dbg) TestFunctional/parallel/MountCmd/any-port: integration-test=busybox-mount healthy within 3.015601847s
functional_test_mount_test.go:172: (dbg) Run:  kubectl --context functional-20210812234826-679351 logs busybox-mount
=== CONT  TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:184: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh stat /mount-9p/created-by-test
=== CONT  TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:184: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh stat /mount-9p/created-by-pod
functional_test_mount_test.go:93: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:97: (dbg) stopping [out/minikube-linux-amd64 mount -p functional-20210812234826-679351 /tmp/mounttest786033249:/mount-9p --alsologtostderr -v=1] ...
--- PASS: TestFunctional/parallel/MountCmd/any-port (6.02s)
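
Taken together, these steps exercise the 9p mount end to end: files written on the host under /tmp/mounttest786033249 appear inside the guest at /mount-9p, the busybox-mount pod reads and writes through the same mount, and the pod-created file (created-by-pod) is then visible to ssh stat from the test harness.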

TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.07s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP
functional_test_tunnel_test.go:164: (dbg) Run:  kubectl --context functional-20210812234826-679351 get svc nginx-svc -o jsonpath={.status.loadBalancer.ingress[0].ip}
--- PASS: TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.07s)

TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0.01s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:229: tunnel at http://10.109.104.224 is working!
--- PASS: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0.01s)

TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:364: (dbg) stopping [out/minikube-linux-amd64 -p functional-20210812234826-679351 tunnel --alsologtostderr] ...
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

TestFunctional/parallel/MountCmd/specific-port (2.27s)
=== RUN   TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:225: (dbg) daemon: [out/minikube-linux-amd64 mount -p functional-20210812234826-679351 /tmp/mounttest623817292:/mount-9p --alsologtostderr -v=1 --port 46464]
functional_test_mount_test.go:255: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:255: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (270.971993ms)
** stderr **
	ssh: Process exited with status 1
** /stderr **
=== CONT  TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:255: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "findmnt -T /mount-9p | grep 9p"
=== CONT  TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:269: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh -- ls -la /mount-9p
=== CONT  TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:273: guest mount directory contents
total 0
functional_test_mount_test.go:275: (dbg) stopping [out/minikube-linux-amd64 mount -p functional-20210812234826-679351 /tmp/mounttest623817292:/mount-9p --alsologtostderr -v=1 --port 46464] ...
=== CONT  TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:276: reading mount text
functional_test_mount_test.go:290: done reading mount text
functional_test_mount_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:242: (dbg) Non-zero exit: out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh "sudo umount -f /mount-9p": exit status 1 (319.067271ms)
-- stdout --
	umount: /mount-9p: not mounted.
-- /stdout --
** stderr **
	ssh: Process exited with status 32
** /stderr **
functional_test_mount_test.go:244: "out/minikube-linux-amd64 -p functional-20210812234826-679351 ssh \"sudo umount -f /mount-9p\"": exit status 1
functional_test_mount_test.go:246: (dbg) stopping [out/minikube-linux-amd64 mount -p functional-20210812234826-679351 /tmp/mounttest623817292:/mount-9p --alsologtostderr -v=1 --port 46464] ...
--- PASS: TestFunctional/parallel/MountCmd/specific-port (2.27s)

TestFunctional/delete_busybox_image (0.09s)
=== RUN   TestFunctional/delete_busybox_image
functional_test.go:183: (dbg) Run:  docker rmi -f docker.io/library/busybox:load-functional-20210812234826-679351
functional_test.go:188: (dbg) Run:  docker rmi -f docker.io/library/busybox:remove-functional-20210812234826-679351
--- PASS: TestFunctional/delete_busybox_image (0.09s)

TestFunctional/delete_my-image_image (0.04s)
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:195: (dbg) Run:  docker rmi -f localhost/my-image:functional-20210812234826-679351
--- PASS: TestFunctional/delete_my-image_image (0.04s)

TestFunctional/delete_minikube_cached_images (0.04s)
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:203: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-20210812234826-679351
--- PASS: TestFunctional/delete_minikube_cached_images (0.04s)

TestJSONOutput/start/Audit (0s)
=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)
=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)
=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/pause/Audit (0s)
=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)
=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)
=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/unpause/Audit (0s)
=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)
=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)
=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/stop/Audit (0s)
=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)
=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)
=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

TestErrorJSONOutput (0.32s)
=== RUN   TestErrorJSONOutput
json_output_test.go:146: (dbg) Run:  out/minikube-linux-amd64 start -p json-output-error-20210812235331-679351 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:146: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p json-output-error-20210812235331-679351 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (90.661849ms)
-- stdout --
	{"data":{"currentstep":"0","message":"[json-output-error-20210812235331-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)","name":"Initial Minikube Setup","totalsteps":"19"},"datacontenttype":"application/json","id":"9f626d5d-34be-40d0-b6f0-907003d04b71","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.step"}
	{"data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig"},"datacontenttype":"application/json","id":"20004609-b8f7-4968-8cad-2d768f4d88d3","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.info"}
	{"data":{"message":"MINIKUBE_BIN=out/minikube-linux-amd64"},"datacontenttype":"application/json","id":"7a7126a6-1dba-45dd-a1fc-c3debd964fbf","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.info"}
	{"data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube"},"datacontenttype":"application/json","id":"3aa97c6f-e360-489c-867a-7601ac554b45","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.info"}
	{"data":{"message":"MINIKUBE_LOCATION=12230"},"datacontenttype":"application/json","id":"6f422940-c43f-45be-bb62-b47d9ce5d7c8","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.info"}
	{"data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on linux/amd64","name":"DRV_UNSUPPORTED_OS","url":""},"datacontenttype":"application/json","id":"aff18c3d-fcc0-4193-8433-a80383854501","source":"https://minikube.sigs.k8s.io/","specversion":"1.0","type":"io.k8s.sigs.minikube.error"}
-- /stdout --
helpers_test.go:176: Cleaning up "json-output-error-20210812235331-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p json-output-error-20210812235331-679351
--- PASS: TestErrorJSONOutput (0.32s)
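
The stdout block above is minikube's machine-readable event stream: one CloudEvents-style JSON object per line, with the type field distinguishing step, info, and error events, and numeric values (currentstep, totalsteps, exitcode) encoded as strings. A minimal Go sketch of a consumer, assuming exactly this one-object-per-line framing; the struct merely mirrors the keys visible above and is an illustration, not minikube's published schema:

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

// event mirrors the fields seen in the --output=json lines above.
type event struct {
	Type string `json:"type"` // io.k8s.sigs.minikube.step / .info / .error
	Data struct {
		Name        string `json:"name"`        // e.g. "Initial Minikube Setup" or "DRV_UNSUPPORTED_OS"
		Message     string `json:"message"`
		CurrentStep string `json:"currentstep"` // numbers arrive as strings
		TotalSteps  string `json:"totalsteps"`
		ExitCode    string `json:"exitcode"` // only populated on error events
	} `json:"data"`
}

func main() {
	// e.g. out/minikube-linux-amd64 start --output=json ... | this program
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		var e event
		if err := json.Unmarshal(sc.Bytes(), &e); err != nil {
			continue // tolerate any non-JSON lines in the stream
		}
		switch e.Type {
		case "io.k8s.sigs.minikube.step":
			fmt.Printf("step %s/%s: %s\n", e.Data.CurrentStep, e.Data.TotalSteps, e.Data.Message)
		case "io.k8s.sigs.minikube.error":
			fmt.Printf("error %s (exit code %s): %s\n", e.Data.Name, e.Data.ExitCode, e.Data.Message)
		}
	}
}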

TestMainNoArgs (0.05s)
=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-linux-amd64
--- PASS: TestMainNoArgs (0.05s)

TestMultiNode/serial/FreshStart2Nodes (148.62s)
=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:81: (dbg) Run:  out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=kvm2  --container-runtime=containerd
E0812 23:53:49.240206  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:54:17.816719  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
multinode_test.go:81: (dbg) Done: out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=kvm2  --container-runtime=containerd: (2m28.191761006s)
multinode_test.go:87: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
--- PASS: TestMultiNode/serial/FreshStart2Nodes (148.62s)

TestMultiNode/serial/DeployApp2Nodes (5.65s)
=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:462: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:467: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- rollout status deployment/busybox
multinode_test.go:467: (dbg) Done: out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- rollout status deployment/busybox: (3.416598159s)
multinode_test.go:473: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:485: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:493: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-8bnbs -- nslookup kubernetes.io
multinode_test.go:493: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-cl242 -- nslookup kubernetes.io
multinode_test.go:503: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-8bnbs -- nslookup kubernetes.default
multinode_test.go:503: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-cl242 -- nslookup kubernetes.default
multinode_test.go:511: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-8bnbs -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:511: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-cl242 -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiNode/serial/DeployApp2Nodes (5.65s)

TestMultiNode/serial/PingHostFrom2Pods (1.06s)
=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:521: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:529: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-8bnbs -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:537: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-8bnbs -- sh -c "ping -c 1 192.168.50.1"
multinode_test.go:529: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-cl242 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:537: (dbg) Run:  out/minikube-linux-amd64 kubectl -p multinode-20210812235331-679351 -- exec busybox-84b6686758-cl242 -- sh -c "ping -c 1 192.168.50.1"
--- PASS: TestMultiNode/serial/PingHostFrom2Pods (1.06s)

TestMultiNode/serial/AddNode (58.15s)
=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:106: (dbg) Run:  out/minikube-linux-amd64 node add -p multinode-20210812235331-679351 -v 3 --alsologtostderr
E0812 23:56:09.471189  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.476449  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.486734  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.506984  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.547262  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.627608  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:09.788007  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:10.108681  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:10.749747  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:12.030467  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:14.591292  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:19.712222  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:29.952416  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0812 23:56:50.432971  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
multinode_test.go:106: (dbg) Done: out/minikube-linux-amd64 node add -p multinode-20210812235331-679351 -v 3 --alsologtostderr: (57.580520978s)
multinode_test.go:112: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
--- PASS: TestMultiNode/serial/AddNode (58.15s)

TestMultiNode/serial/ProfileList (0.24s)
=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:128: (dbg) Run:  out/minikube-linux-amd64 profile list --output json
--- PASS: TestMultiNode/serial/ProfileList (0.24s)

TestMultiNode/serial/CopyFile (1.79s)
=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:169: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --output json --alsologtostderr
helpers_test.go:535: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 cp testdata/cp-test.txt /home/docker/cp-test.txt
helpers_test.go:549: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 ssh "sudo cat /home/docker/cp-test.txt"
helpers_test.go:535: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 cp testdata/cp-test.txt multinode-20210812235331-679351-m02:/home/docker/cp-test.txt
helpers_test.go:549: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 ssh -n multinode-20210812235331-679351-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:535: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 cp testdata/cp-test.txt multinode-20210812235331-679351-m03:/home/docker/cp-test.txt
helpers_test.go:549: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 ssh -n multinode-20210812235331-679351-m03 "sudo cat /home/docker/cp-test.txt"
--- PASS: TestMultiNode/serial/CopyFile (1.79s)

TestMultiNode/serial/StopNode (2.95s)
=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:191: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 node stop m03
multinode_test.go:191: (dbg) Done: out/minikube-linux-amd64 -p multinode-20210812235331-679351 node stop m03: (2.088244527s)
multinode_test.go:197: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status
multinode_test.go:197: (dbg) Non-zero exit: out/minikube-linux-amd64 -p multinode-20210812235331-679351 status: exit status 7 (427.898335ms)
-- stdout --
	multinode-20210812235331-679351
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-20210812235331-679351-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-20210812235331-679351-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
multinode_test.go:204: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
multinode_test.go:204: (dbg) Non-zero exit: out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr: exit status 7 (436.350811ms)
-- stdout --
	multinode-20210812235331-679351
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-20210812235331-679351-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-20210812235331-679351-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
** stderr ** 
	I0812 23:57:10.067507  686924 out.go:298] Setting OutFile to fd 1 ...
	I0812 23:57:10.067591  686924 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:57:10.067596  686924 out.go:311] Setting ErrFile to fd 2...
	I0812 23:57:10.067599  686924 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0812 23:57:10.067694  686924 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0812 23:57:10.067862  686924 out.go:305] Setting JSON to false
	I0812 23:57:10.067883  686924 mustload.go:65] Loading cluster: multinode-20210812235331-679351
	I0812 23:57:10.068209  686924 status.go:253] checking status of multinode-20210812235331-679351 ...
	I0812 23:57:10.068576  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.068627  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.079692  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:38677
	I0812 23:57:10.080260  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.080854  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.080881  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.081255  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.081437  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetState
	I0812 23:57:10.084711  686924 status.go:328] multinode-20210812235331-679351 host status = "Running" (err=<nil>)
	I0812 23:57:10.084738  686924 host.go:66] Checking if "multinode-20210812235331-679351" exists ...
	I0812 23:57:10.085064  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.085101  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.096205  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:44105
	I0812 23:57:10.096616  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.097034  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.097053  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.097410  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.097614  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetIP
	I0812 23:57:10.102773  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | domain multinode-20210812235331-679351 has defined MAC address 52:54:00:c9:66:dd in network mk-multinode-20210812235331-679351
	I0812 23:57:10.103212  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:c9:66:dd", ip: ""} in network mk-multinode-20210812235331-679351: {Iface:virbr5 ExpiryTime:2021-08-13 00:53:46 +0000 UTC Type:0 Mac:52:54:00:c9:66:dd Iaid: IPaddr:192.168.50.42 Prefix:24 Hostname:multinode-20210812235331-679351 Clientid:01:52:54:00:c9:66:dd}
	I0812 23:57:10.103240  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | domain multinode-20210812235331-679351 has defined IP address 192.168.50.42 and MAC address 52:54:00:c9:66:dd in network mk-multinode-20210812235331-679351
	I0812 23:57:10.103386  686924 host.go:66] Checking if "multinode-20210812235331-679351" exists ...
	I0812 23:57:10.103782  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.103827  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.114425  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:33681
	I0812 23:57:10.114858  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.115267  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.115286  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.115643  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.115814  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .DriverName
	I0812 23:57:10.116037  686924 ssh_runner.go:149] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0812 23:57:10.116065  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetSSHHostname
	I0812 23:57:10.121016  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | domain multinode-20210812235331-679351 has defined MAC address 52:54:00:c9:66:dd in network mk-multinode-20210812235331-679351
	I0812 23:57:10.121410  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:c9:66:dd", ip: ""} in network mk-multinode-20210812235331-679351: {Iface:virbr5 ExpiryTime:2021-08-13 00:53:46 +0000 UTC Type:0 Mac:52:54:00:c9:66:dd Iaid: IPaddr:192.168.50.42 Prefix:24 Hostname:multinode-20210812235331-679351 Clientid:01:52:54:00:c9:66:dd}
	I0812 23:57:10.121445  686924 main.go:130] libmachine: (multinode-20210812235331-679351) DBG | domain multinode-20210812235331-679351 has defined IP address 192.168.50.42 and MAC address 52:54:00:c9:66:dd in network mk-multinode-20210812235331-679351
	I0812 23:57:10.121535  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetSSHPort
	I0812 23:57:10.121720  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetSSHKeyPath
	I0812 23:57:10.121855  686924 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetSSHUsername
	I0812 23:57:10.121986  686924 sshutil.go:53] new ssh client: &{IP:192.168.50.42 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/multinode-20210812235331-679351/id_rsa Username:docker}
	I0812 23:57:10.229729  686924 ssh_runner.go:149] Run: systemctl --version
	I0812 23:57:10.236275  686924 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0812 23:57:10.249954  686924 kubeconfig.go:93] found "multinode-20210812235331-679351" server: "https://192.168.50.42:8443"
	I0812 23:57:10.249992  686924 api_server.go:164] Checking apiserver status ...
	I0812 23:57:10.250035  686924 ssh_runner.go:149] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0812 23:57:10.260390  686924 ssh_runner.go:149] Run: sudo egrep ^[0-9]+:freezer: /proc/2693/cgroup
	I0812 23:57:10.268233  686924 api_server.go:180] apiserver freezer: "2:freezer:/kubepods/burstable/pode91743d27d096b7d7c6c253e7d712233/14677a2f4a9368248a0a41917b469d0bced92580b04169f0ee18161260359e3b"
	I0812 23:57:10.268305  686924 ssh_runner.go:149] Run: sudo cat /sys/fs/cgroup/freezer/kubepods/burstable/pode91743d27d096b7d7c6c253e7d712233/14677a2f4a9368248a0a41917b469d0bced92580b04169f0ee18161260359e3b/freezer.state
	I0812 23:57:10.276658  686924 api_server.go:202] freezer state: "THAWED"
	I0812 23:57:10.276683  686924 api_server.go:239] Checking apiserver healthz at https://192.168.50.42:8443/healthz ...
	I0812 23:57:10.284017  686924 api_server.go:265] https://192.168.50.42:8443/healthz returned 200:
	ok
	I0812 23:57:10.284040  686924 status.go:419] multinode-20210812235331-679351 apiserver status = Running (err=<nil>)
	I0812 23:57:10.284050  686924 status.go:255] multinode-20210812235331-679351 status: &{Name:multinode-20210812235331-679351 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0812 23:57:10.284077  686924 status.go:253] checking status of multinode-20210812235331-679351-m02 ...
	I0812 23:57:10.284474  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.284519  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.295772  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:39447
	I0812 23:57:10.296228  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.296805  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.296822  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.297249  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.297464  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetState
	I0812 23:57:10.301085  686924 status.go:328] multinode-20210812235331-679351-m02 host status = "Running" (err=<nil>)
	I0812 23:57:10.301107  686924 host.go:66] Checking if "multinode-20210812235331-679351-m02" exists ...
	I0812 23:57:10.301457  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.301502  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.312266  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:34285
	I0812 23:57:10.312703  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.313177  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.313200  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.313500  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.313693  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetIP
	I0812 23:57:10.319110  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | domain multinode-20210812235331-679351-m02 has defined MAC address 52:54:00:0f:95:11 in network mk-multinode-20210812235331-679351
	I0812 23:57:10.319439  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:0f:95:11", ip: ""} in network mk-multinode-20210812235331-679351: {Iface:virbr5 ExpiryTime:2021-08-13 00:55:17 +0000 UTC Type:0 Mac:52:54:00:0f:95:11 Iaid: IPaddr:192.168.50.31 Prefix:24 Hostname:multinode-20210812235331-679351-m02 Clientid:01:52:54:00:0f:95:11}
	I0812 23:57:10.319466  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | domain multinode-20210812235331-679351-m02 has defined IP address 192.168.50.31 and MAC address 52:54:00:0f:95:11 in network mk-multinode-20210812235331-679351
	I0812 23:57:10.319655  686924 host.go:66] Checking if "multinode-20210812235331-679351-m02" exists ...
	I0812 23:57:10.319974  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.320008  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.330958  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:33737
	I0812 23:57:10.331366  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.331809  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.331829  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.332154  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.332336  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .DriverName
	I0812 23:57:10.332551  686924 ssh_runner.go:149] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0812 23:57:10.332574  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetSSHHostname
	I0812 23:57:10.337770  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | domain multinode-20210812235331-679351-m02 has defined MAC address 52:54:00:0f:95:11 in network mk-multinode-20210812235331-679351
	I0812 23:57:10.338127  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | found host DHCP lease matching {name: "", mac: "52:54:00:0f:95:11", ip: ""} in network mk-multinode-20210812235331-679351: {Iface:virbr5 ExpiryTime:2021-08-13 00:55:17 +0000 UTC Type:0 Mac:52:54:00:0f:95:11 Iaid: IPaddr:192.168.50.31 Prefix:24 Hostname:multinode-20210812235331-679351-m02 Clientid:01:52:54:00:0f:95:11}
	I0812 23:57:10.338162  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) DBG | domain multinode-20210812235331-679351-m02 has defined IP address 192.168.50.31 and MAC address 52:54:00:0f:95:11 in network mk-multinode-20210812235331-679351
	I0812 23:57:10.338251  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetSSHPort
	I0812 23:57:10.338420  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetSSHKeyPath
	I0812 23:57:10.338565  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetSSHUsername
	I0812 23:57:10.338693  686924 sshutil.go:53] new ssh client: &{IP:192.168.50.31 Port:22 SSHKeyPath:/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/machines/multinode-20210812235331-679351-m02/id_rsa Username:docker}
	I0812 23:57:10.422171  686924 ssh_runner.go:149] Run: sudo systemctl is-active --quiet service kubelet
	I0812 23:57:10.432877  686924 status.go:255] multinode-20210812235331-679351-m02 status: &{Name:multinode-20210812235331-679351-m02 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0812 23:57:10.432916  686924 status.go:253] checking status of multinode-20210812235331-679351-m03 ...
	I0812 23:57:10.433237  686924 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0812 23:57:10.433278  686924 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0812 23:57:10.444824  686924 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:41489
	I0812 23:57:10.445320  686924 main.go:130] libmachine: () Calling .GetVersion
	I0812 23:57:10.445867  686924 main.go:130] libmachine: Using API Version  1
	I0812 23:57:10.445891  686924 main.go:130] libmachine: () Calling .SetConfigRaw
	I0812 23:57:10.446252  686924 main.go:130] libmachine: () Calling .GetMachineName
	I0812 23:57:10.446434  686924 main.go:130] libmachine: (multinode-20210812235331-679351-m03) Calling .GetState
	I0812 23:57:10.449565  686924 status.go:328] multinode-20210812235331-679351-m03 host status = "Stopped" (err=<nil>)
	I0812 23:57:10.449584  686924 status.go:341] host is not running, skipping remaining checks
	I0812 23:57:10.449591  686924 status.go:255] multinode-20210812235331-679351-m03 status: &{Name:multinode-20210812235331-679351-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopNode (2.95s)
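Reviewer note: the apiserver health probe in the stderr log above reduces to two checks that can be reproduced by hand. A minimal sketch, assuming a cgroup v1 freezer hierarchy and reusing the container ID and IP from this run (`curl -k` stands in for minikube's internal HTTPS client):

	# inside the node, e.g. via: minikube ssh -p multinode-20210812235331-679351
	sudo cat /sys/fs/cgroup/freezer/kubepods/burstable/pode91743d27d096b7d7c6c253e7d712233/14677a2f4a9368248a0a41917b469d0bced92580b04169f0ee18161260359e3b/freezer.state   # expect: THAWED
	curl -k https://192.168.50.42:8443/healthz   # expect: ok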

TestMultiNode/serial/StartAfterStop (72.14s)

=== RUN   TestMultiNode/serial/StartAfterStop
multinode_test.go:235: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 node start m03 --alsologtostderr
E0812 23:57:31.394286  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
multinode_test.go:235: (dbg) Done: out/minikube-linux-amd64 -p multinode-20210812235331-679351 node start m03 --alsologtostderr: (1m11.500790649s)
multinode_test.go:242: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status
multinode_test.go:256: (dbg) Run:  kubectl get nodes
--- PASS: TestMultiNode/serial/StartAfterStop (72.14s)

TestMultiNode/serial/RestartKeepsNodes (488.29s)

=== RUN   TestMultiNode/serial/RestartKeepsNodes
multinode_test.go:264: (dbg) Run:  out/minikube-linux-amd64 node list -p multinode-20210812235331-679351
multinode_test.go:271: (dbg) Run:  out/minikube-linux-amd64 stop -p multinode-20210812235331-679351
E0812 23:58:49.240722  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0812 23:58:53.314969  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:01:09.471327  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
multinode_test.go:271: (dbg) Done: out/minikube-linux-amd64 stop -p multinode-20210812235331-679351: (3m6.261977866s)
multinode_test.go:276: (dbg) Run:  out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true -v=8 --alsologtostderr
E0813 00:01:37.155344  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:03:49.240822  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0813 00:05:13.177797  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0813 00:06:09.471182  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
multinode_test.go:276: (dbg) Done: out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true -v=8 --alsologtostderr: (5m1.914267048s)
multinode_test.go:281: (dbg) Run:  out/minikube-linux-amd64 node list -p multinode-20210812235331-679351
--- PASS: TestMultiNode/serial/RestartKeepsNodes (488.29s)

TestMultiNode/serial/DeleteNode (2.15s)

=== RUN   TestMultiNode/serial/DeleteNode
multinode_test.go:375: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 node delete m03
multinode_test.go:375: (dbg) Done: out/minikube-linux-amd64 -p multinode-20210812235331-679351 node delete m03: (1.459544135s)
multinode_test.go:381: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
multinode_test.go:405: (dbg) Run:  kubectl get nodes
multinode_test.go:413: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/DeleteNode (2.15s)
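Reviewer note: the go-template in the final kubectl invocation above walks .items and prints the status of each node's "Ready" condition. A roughly equivalent jsonpath query, shown only as an illustrative alternative (the test itself runs the go-template form):

	kubectl get nodes -o jsonpath='{range .items[*]}{.status.conditions[?(@.type=="Ready")].status}{"\n"}{end}'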

TestMultiNode/serial/StopMultiNode (184.41s)

=== RUN   TestMultiNode/serial/StopMultiNode
multinode_test.go:295: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 stop
E0813 00:08:49.241037  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
multinode_test.go:295: (dbg) Done: out/minikube-linux-amd64 -p multinode-20210812235331-679351 stop: (3m4.234390945s)
multinode_test.go:301: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status
multinode_test.go:301: (dbg) Non-zero exit: out/minikube-linux-amd64 -p multinode-20210812235331-679351 status: exit status 7 (85.348451ms)

-- stdout --
	multinode-20210812235331-679351
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-20210812235331-679351-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
multinode_test.go:308: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
multinode_test.go:308: (dbg) Non-zero exit: out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr: exit status 7 (85.921911ms)

-- stdout --
	multinode-20210812235331-679351
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-20210812235331-679351-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
** stderr ** 
	I0813 00:09:37.404298  688188 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:09:37.404397  688188 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:09:37.404406  688188 out.go:311] Setting ErrFile to fd 2...
	I0813 00:09:37.404409  688188 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:09:37.404879  688188 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:09:37.405272  688188 out.go:305] Setting JSON to false
	I0813 00:09:37.405335  688188 mustload.go:65] Loading cluster: multinode-20210812235331-679351
	I0813 00:09:37.406004  688188 status.go:253] checking status of multinode-20210812235331-679351 ...
	I0813 00:09:37.406378  688188 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:09:37.406425  688188 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:09:37.417547  688188 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:41803
	I0813 00:09:37.418009  688188 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:09:37.418592  688188 main.go:130] libmachine: Using API Version  1
	I0813 00:09:37.418616  688188 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:09:37.418977  688188 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:09:37.419161  688188 main.go:130] libmachine: (multinode-20210812235331-679351) Calling .GetState
	I0813 00:09:37.422175  688188 status.go:328] multinode-20210812235331-679351 host status = "Stopped" (err=<nil>)
	I0813 00:09:37.422194  688188 status.go:341] host is not running, skipping remaining checks
	I0813 00:09:37.422201  688188 status.go:255] multinode-20210812235331-679351 status: &{Name:multinode-20210812235331-679351 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0813 00:09:37.422229  688188 status.go:253] checking status of multinode-20210812235331-679351-m02 ...
	I0813 00:09:37.422516  688188 main.go:130] libmachine: Found binary path at /home/jenkins/workspace/KVM_Linux_containerd_integration/out/docker-machine-driver-kvm2
	I0813 00:09:37.422552  688188 main.go:130] libmachine: Launching plugin server for driver kvm2
	I0813 00:09:37.433523  688188 main.go:130] libmachine: Plugin server listening at address 127.0.0.1:44069
	I0813 00:09:37.433941  688188 main.go:130] libmachine: () Calling .GetVersion
	I0813 00:09:37.434395  688188 main.go:130] libmachine: Using API Version  1
	I0813 00:09:37.434420  688188 main.go:130] libmachine: () Calling .SetConfigRaw
	I0813 00:09:37.434756  688188 main.go:130] libmachine: () Calling .GetMachineName
	I0813 00:09:37.434945  688188 main.go:130] libmachine: (multinode-20210812235331-679351-m02) Calling .GetState
	I0813 00:09:37.437595  688188 status.go:328] multinode-20210812235331-679351-m02 host status = "Stopped" (err=<nil>)
	I0813 00:09:37.437610  688188 status.go:341] host is not running, skipping remaining checks
	I0813 00:09:37.437615  688188 status.go:255] multinode-20210812235331-679351-m02 status: &{Name:multinode-20210812235331-679351-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiNode/serial/StopMultiNode (184.41s)
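Reviewer note: the two "Non-zero exit" runs above are expected: `minikube status` encodes component state in its exit code, and exit status 7 here accompanies a fully stopped cluster (the harness itself treats it as "may be ok"). A wrapper script therefore has to distinguish exit codes rather than treat any non-zero exit as failure; a sketch using the profile from this run:

	out/minikube-linux-amd64 -p multinode-20210812235331-679351 status
	case $? in
	  0) echo "running" ;;
	  7) echo "stopped (expected after minikube stop)" ;;
	  *) echo "unexpected status error" ;;
	esac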

TestMultiNode/serial/RestartMultiNode (202.45s)

=== RUN   TestMultiNode/serial/RestartMultiNode
multinode_test.go:335: (dbg) Run:  out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true -v=8 --alsologtostderr --driver=kvm2  --container-runtime=containerd
E0813 00:11:09.471380  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:12:32.515951  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
multinode_test.go:335: (dbg) Done: out/minikube-linux-amd64 start -p multinode-20210812235331-679351 --wait=true -v=8 --alsologtostderr --driver=kvm2  --container-runtime=containerd: (3m21.870811699s)
multinode_test.go:341: (dbg) Run:  out/minikube-linux-amd64 -p multinode-20210812235331-679351 status --alsologtostderr
multinode_test.go:355: (dbg) Run:  kubectl get nodes
multinode_test.go:363: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/RestartMultiNode (202.45s)

TestMultiNode/serial/ValidateNameConflict (72.25s)

=== RUN   TestMultiNode/serial/ValidateNameConflict
multinode_test.go:424: (dbg) Run:  out/minikube-linux-amd64 node list -p multinode-20210812235331-679351
multinode_test.go:433: (dbg) Run:  out/minikube-linux-amd64 start -p multinode-20210812235331-679351-m02 --driver=kvm2  --container-runtime=containerd
multinode_test.go:433: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p multinode-20210812235331-679351-m02 --driver=kvm2  --container-runtime=containerd: exit status 14 (100.521207ms)

-- stdout --
	* [multinode-20210812235331-679351-m02] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	
	

-- /stdout --
** stderr ** 
	! Profile name 'multinode-20210812235331-679351-m02' is duplicated with machine name 'multinode-20210812235331-679351-m02' in profile 'multinode-20210812235331-679351'
	X Exiting due to MK_USAGE: Profile name should be unique

** /stderr **
multinode_test.go:441: (dbg) Run:  out/minikube-linux-amd64 start -p multinode-20210812235331-679351-m03 --driver=kvm2  --container-runtime=containerd
E0813 00:13:49.240475  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
multinode_test.go:441: (dbg) Done: out/minikube-linux-amd64 start -p multinode-20210812235331-679351-m03 --driver=kvm2  --container-runtime=containerd: (1m10.601949307s)
multinode_test.go:448: (dbg) Run:  out/minikube-linux-amd64 node add -p multinode-20210812235331-679351
multinode_test.go:448: (dbg) Non-zero exit: out/minikube-linux-amd64 node add -p multinode-20210812235331-679351: exit status 80 (260.251908ms)

-- stdout --
	* Adding node m03 to cluster multinode-20210812235331-679351
	
	

-- /stdout --
** stderr ** 
	X Exiting due to GUEST_NODE_ADD: Node multinode-20210812235331-679351-m03 already exists in multinode-20210812235331-679351-m03 profile
	* 
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	[warning]: invalid value provided to Color, using default
	╭─────────────────────────────────────────────────────────────────────────────╮
	│                                                                             │
	│    * If the above advice does not help, please let us know:                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose               │
	│                                                                             │
	│    * Please attach the following file to the GitHub issue:                  │
	│    * - /tmp/minikube_node_040ea7097fd6ed71e65be9a474587f81f0ccd21d_0.log    │
	│                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────╯

** /stderr **
multinode_test.go:453: (dbg) Run:  out/minikube-linux-amd64 delete -p multinode-20210812235331-679351-m03
multinode_test.go:453: (dbg) Done: out/minikube-linux-amd64 delete -p multinode-20210812235331-679351-m03: (1.226792693s)
--- PASS: TestMultiNode/serial/ValidateNameConflict (72.25s)

TestDebPackageInstall/install_amd64_debian:sid/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_debian:sid/minikube
--- PASS: TestDebPackageInstall/install_amd64_debian:sid/minikube (0.00s)

TestDebPackageInstall/install_amd64_debian:sid/kvm2-driver (11.32s)

=== RUN   TestDebPackageInstall/install_amd64_debian:sid/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:sid sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:sid sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (11.323021823s)
--- PASS: TestDebPackageInstall/install_amd64_debian:sid/kvm2-driver (11.32s)

TestDebPackageInstall/install_amd64_debian:latest/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_debian:latest/minikube
--- PASS: TestDebPackageInstall/install_amd64_debian:latest/minikube (0.00s)

TestDebPackageInstall/install_amd64_debian:latest/kvm2-driver (10.48s)

=== RUN   TestDebPackageInstall/install_amd64_debian:latest/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:latest sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:latest sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (10.482267489s)
--- PASS: TestDebPackageInstall/install_amd64_debian:latest/kvm2-driver (10.48s)

TestDebPackageInstall/install_amd64_debian:10/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_debian:10/minikube
--- PASS: TestDebPackageInstall/install_amd64_debian:10/minikube (0.00s)

TestDebPackageInstall/install_amd64_debian:10/kvm2-driver (9.94s)

=== RUN   TestDebPackageInstall/install_amd64_debian:10/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:10 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:10 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (9.944839294s)
--- PASS: TestDebPackageInstall/install_amd64_debian:10/kvm2-driver (9.94s)

TestDebPackageInstall/install_amd64_debian:9/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_debian:9/minikube
--- PASS: TestDebPackageInstall/install_amd64_debian:9/minikube (0.00s)

TestDebPackageInstall/install_amd64_debian:9/kvm2-driver (8.68s)

=== RUN   TestDebPackageInstall/install_amd64_debian:9/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:9 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp debian:9 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (8.68451649s)
--- PASS: TestDebPackageInstall/install_amd64_debian:9/kvm2-driver (8.68s)

TestDebPackageInstall/install_amd64_ubuntu:latest/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:latest/minikube
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:latest/minikube (0.00s)

TestDebPackageInstall/install_amd64_ubuntu:latest/kvm2-driver (14.85s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:latest/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:latest sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:latest sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (14.853267585s)
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:latest/kvm2-driver (14.85s)

TestDebPackageInstall/install_amd64_ubuntu:20.10/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:20.10/minikube
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:20.10/minikube (0.00s)

TestDebPackageInstall/install_amd64_ubuntu:20.10/kvm2-driver (14.96s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:20.10/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:20.10 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:20.10 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (14.958199695s)
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:20.10/kvm2-driver (14.96s)

TestDebPackageInstall/install_amd64_ubuntu:20.04/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:20.04/minikube
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:20.04/minikube (0.00s)

TestDebPackageInstall/install_amd64_ubuntu:20.04/kvm2-driver (14.35s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:20.04/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:20.04 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:20.04 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (14.349500346s)
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:20.04/kvm2-driver (14.35s)

TestDebPackageInstall/install_amd64_ubuntu:18.04/minikube (0s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:18.04/minikube
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:18.04/minikube (0.00s)

TestDebPackageInstall/install_amd64_ubuntu:18.04/kvm2-driver (13.76s)

=== RUN   TestDebPackageInstall/install_amd64_ubuntu:18.04/kvm2-driver
pkg_install_test.go:104: (dbg) Run:  docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:18.04 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
pkg_install_test.go:104: (dbg) Done: docker run --rm -v/home/jenkins/workspace/KVM_Linux_containerd_integration/out:/var/tmp ubuntu:18.04 sh -c "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb": (13.75692932s)
--- PASS: TestDebPackageInstall/install_amd64_ubuntu:18.04/kvm2-driver (13.76s)
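Reviewer note: every kvm2-driver case in the TestDebPackageInstall matrix above runs the identical smoke test, varying only the base image: install libvirt0, then `dpkg -i` the freshly built .deb inside a throwaway container. The matrix collapses to a loop like this (a sketch; $OUT stands in for the Jenkins out/ directory that the tests mount at /var/tmp):

	for img in debian:sid debian:latest debian:10 debian:9 ubuntu:latest ubuntu:20.10 ubuntu:20.04 ubuntu:18.04; do
	  docker run --rm -v"$OUT":/var/tmp "$img" sh -c \
	    "apt-get update; apt-get install -y libvirt0; dpkg -i /var/tmp/docker-machine-driver-kvm2_1.22.0-0_amd64.deb"
	done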

TestPreload (147.51s)

=== RUN   TestPreload
preload_test.go:48: (dbg) Run:  out/minikube-linux-amd64 start -p test-preload-20210813001552-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.17.0
E0813 00:16:09.471791  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
preload_test.go:48: (dbg) Done: out/minikube-linux-amd64 start -p test-preload-20210813001552-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.17.0: (1m32.320076015s)
preload_test.go:61: (dbg) Run:  out/minikube-linux-amd64 ssh -p test-preload-20210813001552-679351 -- sudo crictl pull busybox
preload_test.go:61: (dbg) Done: out/minikube-linux-amd64 ssh -p test-preload-20210813001552-679351 -- sudo crictl pull busybox: (1.231806538s)
preload_test.go:71: (dbg) Run:  out/minikube-linux-amd64 start -p test-preload-20210813001552-679351 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.17.3
preload_test.go:71: (dbg) Done: out/minikube-linux-amd64 start -p test-preload-20210813001552-679351 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.17.3: (52.575418713s)
preload_test.go:80: (dbg) Run:  out/minikube-linux-amd64 ssh -p test-preload-20210813001552-679351 -- sudo crictl image ls
helpers_test.go:176: Cleaning up "test-preload-20210813001552-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p test-preload-20210813001552-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p test-preload-20210813001552-679351: (1.153945849s)
--- PASS: TestPreload (147.51s)
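Reviewer note: the sequence above checks that an image pulled while the profile ran Kubernetes v1.17.0 with --preload=false (busybox, via crictl) survives a restart onto v1.17.3. The final `crictl image ls` supplies the evidence; a manual spot-check might look like this (the grep is my addition, not part of the test):

	out/minikube-linux-amd64 ssh -p test-preload-20210813001552-679351 -- sudo crictl image ls | grep busybox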

TestScheduledStopUnix (90.87s)

=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-linux-amd64 start -p scheduled-stop-20210813001820-679351 --memory=2048 --driver=kvm2  --container-runtime=containerd
E0813 00:18:49.240835  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
scheduled_stop_test.go:128: (dbg) Done: out/minikube-linux-amd64 start -p scheduled-stop-20210813001820-679351 --memory=2048 --driver=kvm2  --container-runtime=containerd: (1m1.959413686s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-amd64 stop -p scheduled-stop-20210813001820-679351 --schedule 5m
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-linux-amd64 status --format={{.TimeToStop}} -p scheduled-stop-20210813001820-679351 -n scheduled-stop-20210813001820-679351
scheduled_stop_test.go:169: signal error was:  <nil>
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-amd64 stop -p scheduled-stop-20210813001820-679351 --schedule 8s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-amd64 stop -p scheduled-stop-20210813001820-679351 --cancel-scheduled
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p scheduled-stop-20210813001820-679351 -n scheduled-stop-20210813001820-679351
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-amd64 status -p scheduled-stop-20210813001820-679351
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-amd64 stop -p scheduled-stop-20210813001820-679351 --schedule 5s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-amd64 status -p scheduled-stop-20210813001820-679351
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-linux-amd64 status -p scheduled-stop-20210813001820-679351: exit status 7 (70.018777ms)

-- stdout --
	scheduled-stop-20210813001820-679351
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	

-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p scheduled-stop-20210813001820-679351 -n scheduled-stop-20210813001820-679351
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p scheduled-stop-20210813001820-679351 -n scheduled-stop-20210813001820-679351: exit status 7 (66.943274ms)

-- stdout --
	Stopped

-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:176: Cleaning up "scheduled-stop-20210813001820-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p scheduled-stop-20210813001820-679351
--- PASS: TestScheduledStopUnix (90.87s)
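Reviewer note: the scheduled-stop flow above is driven by three flag combinations, all visible in the log. As a usage sketch, with the profile name from this run:

	minikube stop -p scheduled-stop-20210813001820-679351 --schedule 5m                 # arm a stop five minutes out
	minikube stop -p scheduled-stop-20210813001820-679351 --cancel-scheduled            # disarm a pending stop
	minikube status --format={{.TimeToStop}} -p scheduled-stop-20210813001820-679351    # inspect the countdown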

TestRunningBinaryUpgrade (138.53s)

=== RUN   TestRunningBinaryUpgrade
=== PAUSE TestRunningBinaryUpgrade

=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:128: (dbg) Run:  /tmp/minikube-v1.16.0.140511811.exe start -p running-upgrade-20210813002331-679351 --memory=2200 --vm-driver=kvm2  --container-runtime=containerd

=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:128: (dbg) Done: /tmp/minikube-v1.16.0.140511811.exe start -p running-upgrade-20210813002331-679351 --memory=2200 --vm-driver=kvm2  --container-runtime=containerd: (1m12.785401596s)
version_upgrade_test.go:138: (dbg) Run:  out/minikube-linux-amd64 start -p running-upgrade-20210813002331-679351 --memory=2200 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd

=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:138: (dbg) Done: out/minikube-linux-amd64 start -p running-upgrade-20210813002331-679351 --memory=2200 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd: (1m3.884870152s)
helpers_test.go:176: Cleaning up "running-upgrade-20210813002331-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p running-upgrade-20210813002331-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p running-upgrade-20210813002331-679351: (1.438837502s)
--- PASS: TestRunningBinaryUpgrade (138.53s)

TestKubernetesUpgrade (258.98s)

=== RUN   TestKubernetesUpgrade
=== PAUSE TestKubernetesUpgrade

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:224: (dbg) Run:  out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.14.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:224: (dbg) Done: out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.14.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd: (1m24.550553945s)
version_upgrade_test.go:229: (dbg) Run:  out/minikube-linux-amd64 stop -p kubernetes-upgrade-20210813002240-679351

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:229: (dbg) Done: out/minikube-linux-amd64 stop -p kubernetes-upgrade-20210813002240-679351: (6.176157993s)
version_upgrade_test.go:234: (dbg) Run:  out/minikube-linux-amd64 -p kubernetes-upgrade-20210813002240-679351 status --format={{.Host}}
version_upgrade_test.go:234: (dbg) Non-zero exit: out/minikube-linux-amd64 -p kubernetes-upgrade-20210813002240-679351 status --format={{.Host}}: exit status 7 (73.818596ms)

-- stdout --
	Stopped

-- /stdout --
version_upgrade_test.go:236: status error: exit status 7 (may be ok)
version_upgrade_test.go:245: (dbg) Run:  out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.22.0-rc.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:245: (dbg) Done: out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.22.0-rc.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd: (1m44.141198692s)
version_upgrade_test.go:250: (dbg) Run:  kubectl --context kubernetes-upgrade-20210813002240-679351 version --output=json
version_upgrade_test.go:269: Attempting to downgrade Kubernetes (should fail)
version_upgrade_test.go:271: (dbg) Run:  out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.14.0 --driver=kvm2  --container-runtime=containerd
version_upgrade_test.go:271: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.14.0 --driver=kvm2  --container-runtime=containerd: exit status 106 (132.269503ms)

-- stdout --
	* [kubernetes-upgrade-20210813002240-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	
	

-- /stdout --
** stderr ** 
	X Exiting due to K8S_DOWNGRADE_UNSUPPORTED: Unable to safely downgrade existing Kubernetes v1.22.0-rc.0 cluster to v1.14.0
	* Suggestion: 
	
	    1) Recreate the cluster with Kubernetes 1.14.0, by running:
	    
	    minikube delete -p kubernetes-upgrade-20210813002240-679351
	    minikube start -p kubernetes-upgrade-20210813002240-679351 --kubernetes-version=v1.14.0
	    
	    2) Create a second cluster with Kubernetes 1.14.0, by running:
	    
	    minikube start -p kubernetes-upgrade-20210813002240-6793512 --kubernetes-version=v1.14.0
	    
	    3) Use the existing cluster at version Kubernetes 1.22.0-rc.0, by running:
	    
	    minikube start -p kubernetes-upgrade-20210813002240-679351 --kubernetes-version=v1.22.0-rc.0
	    

** /stderr **
version_upgrade_test.go:275: Attempting restart after unsuccessful downgrade
version_upgrade_test.go:277: (dbg) Run:  out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.22.0-rc.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd
E0813 00:26:09.471496  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory

=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:277: (dbg) Done: out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.22.0-rc.0 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd: (1m2.589349298s)
helpers_test.go:176: Cleaning up "kubernetes-upgrade-20210813002240-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p kubernetes-upgrade-20210813002240-679351
helpers_test.go:179: (dbg) Done: out/minikube-linux-amd64 delete -p kubernetes-upgrade-20210813002240-679351: (1.249522364s)
--- PASS: TestKubernetesUpgrade (258.98s)
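Reviewer note: the downgrade attempt above fails fast with exit status 106 (K8S_DOWNGRADE_UNSUPPORTED) before the cluster is touched, which makes the refusal easy to detect in automation; a sketch using the profile and versions from this run:

	out/minikube-linux-amd64 start -p kubernetes-upgrade-20210813002240-679351 --memory=2200 --kubernetes-version=v1.14.0 --driver=kvm2 --container-runtime=containerd
	[ $? -eq 106 ] && echo "downgrade refused; cluster still at v1.22.0-rc.0"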

TestPause/serial/Start (178.13s)

=== RUN   TestPause/serial/Start
pause_test.go:77: (dbg) Run:  out/minikube-linux-amd64 start -p pause-20210813001951-679351 --memory=2048 --install-addons=false --wait=all --driver=kvm2  --container-runtime=containerd

=== CONT  TestPause/serial/Start
pause_test.go:77: (dbg) Done: out/minikube-linux-amd64 start -p pause-20210813001951-679351 --memory=2048 --install-addons=false --wait=all --driver=kvm2  --container-runtime=containerd: (2m58.128338136s)
--- PASS: TestPause/serial/Start (178.13s)

TestNetworkPlugins/group/false (0.42s)

=== RUN   TestNetworkPlugins/group/false
net_test.go:213: (dbg) Run:  out/minikube-linux-amd64 start -p false-20210813002105-679351 --memory=2048 --alsologtostderr --cni=false --driver=kvm2  --container-runtime=containerd
net_test.go:213: (dbg) Non-zero exit: out/minikube-linux-amd64 start -p false-20210813002105-679351 --memory=2048 --alsologtostderr --cni=false --driver=kvm2  --container-runtime=containerd: exit status 14 (135.495275ms)

-- stdout --
	* [false-20210813002105-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	  - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	  - MINIKUBE_BIN=out/minikube-linux-amd64
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	  - MINIKUBE_LOCATION=12230
	* Using the kvm2 driver based on user configuration
	
	

-- /stdout --
** stderr ** 
	I0813 00:21:05.424621  714868 out.go:298] Setting OutFile to fd 1 ...
	I0813 00:21:05.424693  714868 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:21:05.424697  714868 out.go:311] Setting ErrFile to fd 2...
	I0813 00:21:05.424700  714868 out.go:345] TERM=,COLORTERM=, which probably does not support color
	I0813 00:21:05.424793  714868 root.go:313] Updating PATH: /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/bin
	I0813 00:21:05.425067  714868 out.go:305] Setting JSON to false
	I0813 00:21:05.461241  714868 start.go:111] hostinfo: {"hostname":"debian-jenkins-agent-10","uptime":14628,"bootTime":1628799437,"procs":191,"os":"linux","platform":"debian","platformFamily":"debian","platformVersion":"9.13","kernelVersion":"4.9.0-16-amd64","kernelArch":"x86_64","virtualizationSystem":"kvm","virtualizationRole":"guest","hostId":"c29e0b88-ef83-6765-d2fa-208fdce1af32"}
	I0813 00:21:05.461364  714868 start.go:121] virtualization: kvm guest
	I0813 00:21:05.464163  714868 out.go:177] * [false-20210813002105-679351] minikube v1.22.0 on Debian 9.13 (kvm/amd64)
	I0813 00:21:05.465547  714868 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/kubeconfig
	I0813 00:21:05.464323  714868 notify.go:169] Checking for updates...
	I0813 00:21:05.466979  714868 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-amd64
	I0813 00:21:05.468400  714868 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube
	I0813 00:21:05.469747  714868 out.go:177]   - MINIKUBE_LOCATION=12230
	I0813 00:21:05.470418  714868 driver.go:335] Setting default libvirt URI to qemu:///system
	I0813 00:21:05.504766  714868 out.go:177] * Using the kvm2 driver based on user configuration
	I0813 00:21:05.504790  714868 start.go:278] selected driver: kvm2
	I0813 00:21:05.504796  714868 start.go:751] validating driver "kvm2" against <nil>
	I0813 00:21:05.504814  714868 start.go:762] status for kvm2: {Installed:true Healthy:true Running:true NeedsImprovement:false Error:<nil> Reason: Fix: Doc:}
	I0813 00:21:05.506940  714868 out.go:177] 
	W0813 00:21:05.507029  714868 out.go:242] X Exiting due to MK_USAGE: The "containerd" container runtime requires CNI
	X Exiting due to MK_USAGE: The "containerd" container runtime requires CNI
	I0813 00:21:05.508318  714868 out.go:177] 

** /stderr **
helpers_test.go:176: Cleaning up "false-20210813002105-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p false-20210813002105-679351
--- PASS: TestNetworkPlugins/group/false (0.42s)
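Reviewer note: this "false" group is a negative test: with --container-runtime=containerd, minikube requires a CNI, so --cni=false is rejected with MK_USAGE (exit status 14) before any VM is created. Any concrete CNI selection satisfies the check; for example (an illustrative invocation, not run by the test):

	out/minikube-linux-amd64 start -p false-20210813002105-679351 --memory=2048 --cni=bridge --driver=kvm2 --container-runtime=containerd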

TestPause/serial/SecondStartNoReconfiguration (70.16s)

=== RUN   TestPause/serial/SecondStartNoReconfiguration
pause_test.go:89: (dbg) Run:  out/minikube-linux-amd64 start -p pause-20210813001951-679351 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd

=== CONT  TestPause/serial/SecondStartNoReconfiguration
pause_test.go:89: (dbg) Done: out/minikube-linux-amd64 start -p pause-20210813001951-679351 --alsologtostderr -v=1 --driver=kvm2  --container-runtime=containerd: (1m10.133927852s)
--- PASS: TestPause/serial/SecondStartNoReconfiguration (70.16s)

TestStoppedBinaryUpgrade/MinikubeLogs (1.53s)

=== RUN   TestStoppedBinaryUpgrade/MinikubeLogs
version_upgrade_test.go:208: (dbg) Run:  out/minikube-linux-amd64 logs -p stopped-upgrade-20210813001951-679351
version_upgrade_test.go:208: (dbg) Done: out/minikube-linux-amd64 logs -p stopped-upgrade-20210813001951-679351: (1.533830963s)
--- PASS: TestStoppedBinaryUpgrade/MinikubeLogs (1.53s)

TestNetworkPlugins/group/auto/Start (109.98s)

=== RUN   TestNetworkPlugins/group/auto/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p auto-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --driver=kvm2  --container-runtime=containerd
E0813 00:23:49.240643  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory

=== CONT  TestNetworkPlugins/group/auto/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p auto-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --driver=kvm2  --container-runtime=containerd: (1m49.974021851s)
--- PASS: TestNetworkPlugins/group/auto/Start (109.98s)

TestPause/serial/Pause (0.92s)

=== RUN   TestPause/serial/Pause
pause_test.go:107: (dbg) Run:  out/minikube-linux-amd64 pause -p pause-20210813001951-679351 --alsologtostderr -v=5
--- PASS: TestPause/serial/Pause (0.92s)

TestPause/serial/VerifyStatus (0.33s)

=== RUN   TestPause/serial/VerifyStatus
status_test.go:76: (dbg) Run:  out/minikube-linux-amd64 status -p pause-20210813001951-679351 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-amd64 status -p pause-20210813001951-679351 --output=json --layout=cluster: exit status 2 (306.093175ms)

-- stdout --
	{"Name":"pause-20210813001951-679351","StatusCode":418,"StatusName":"Paused","Step":"Done","StepDetail":"* Paused 7 containers in: kube-system, kubernetes-dashboard, storage-gluster, istio-operator","BinaryVersion":"v1.22.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":200,"StatusName":"OK"}},"Nodes":[{"Name":"pause-20210813001951-679351","StatusCode":200,"StatusName":"OK","Components":{"apiserver":{"Name":"apiserver","StatusCode":418,"StatusName":"Paused"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

-- /stdout --
--- PASS: TestPause/serial/VerifyStatus (0.33s)
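Reviewer note: for a paused cluster, `minikube status --output=json --layout=cluster` exits with status 2 and reports StatusCode 418 ("Paused") per component, as the JSON above shows. Because of the non-zero exit, the output has to be captured before inspection; a sketch assuming jq is available:

	out/minikube-linux-amd64 status -p pause-20210813001951-679351 --output=json --layout=cluster > status.json || true
	jq '.Nodes[].Components' status.json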

TestPause/serial/Unpause (3.06s)

=== RUN   TestPause/serial/Unpause
pause_test.go:118: (dbg) Run:  out/minikube-linux-amd64 unpause -p pause-20210813001951-679351 --alsologtostderr -v=5
pause_test.go:118: (dbg) Done: out/minikube-linux-amd64 unpause -p pause-20210813001951-679351 --alsologtostderr -v=5: (3.056606911s)
--- PASS: TestPause/serial/Unpause (3.06s)

TestPause/serial/DeletePaused (1.2s)

=== RUN   TestPause/serial/DeletePaused
pause_test.go:129: (dbg) Run:  out/minikube-linux-amd64 delete -p pause-20210813001951-679351 --alsologtostderr -v=5
pause_test.go:129: (dbg) Done: out/minikube-linux-amd64 delete -p pause-20210813001951-679351 --alsologtostderr -v=5: (1.195301901s)
--- PASS: TestPause/serial/DeletePaused (1.20s)

                                                
                                    
TestPause/serial/VerifyDeletedResources (20.24s)

                                                
                                                
=== RUN   TestPause/serial/VerifyDeletedResources
pause_test.go:139: (dbg) Run:  out/minikube-linux-amd64 profile list --output json
pause_test.go:139: (dbg) Done: out/minikube-linux-amd64 profile list --output json: (20.238986786s)
--- PASS: TestPause/serial/VerifyDeletedResources (20.24s)
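
For readers reproducing this group by hand, the serial flow exercised above reduces to the following commands (the profile name here is illustrative; out/minikube-linux-amd64 is the tree-local binary used throughout this report):

out/minikube-linux-amd64 start -p pause-demo --driver=kvm2 --container-runtime=containerd
out/minikube-linux-amd64 pause -p pause-demo --alsologtostderr -v=5     # freeze the control plane and workloads
out/minikube-linux-amd64 status -p pause-demo --output=json --layout=cluster
out/minikube-linux-amd64 unpause -p pause-demo --alsologtostderr -v=5   # resume
out/minikube-linux-amd64 delete -p pause-demo --alsologtostderr -v=5
out/minikube-linux-amd64 profile list --output json                     # confirm the profile is gone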

                                                
                                    
TestNetworkPlugins/group/kindnet/Start (112.46s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p kindnet-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=kindnet --driver=kvm2  --container-runtime=containerd

                                                
                                                
=== CONT  TestNetworkPlugins/group/kindnet/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p kindnet-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=kindnet --driver=kvm2  --container-runtime=containerd: (1m52.464271951s)
--- PASS: TestNetworkPlugins/group/kindnet/Start (112.46s)

                                                
                                    
TestNetworkPlugins/group/auto/KubeletFlags (0.23s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p auto-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/auto/KubeletFlags (0.23s)

                                                
                                    
TestNetworkPlugins/group/auto/NetCatPod (11.62s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context auto-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/auto/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-hb9r5" [53b0a15d-8687-47c6-b3dd-117da5fa4b9e] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:343: "netcat-66fbc655d5-hb9r5" [53b0a15d-8687-47c6-b3dd-117da5fa4b9e] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/auto/NetCatPod: app=netcat healthy within 11.018130412s
--- PASS: TestNetworkPlugins/group/auto/NetCatPod (11.62s)
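
The netcat-deployment.yaml test data is not reproduced in this report, but the pod conditions above imply a single-replica Deployment named netcat, labelled app=netcat, with one container named dnsutils that must listen on port 8080 (the Localhost and HairPin probes below scan that port) and sit behind a Service named netcat. A hypothetical equivalent, with a stand-in image and a placeholder listener command, might look like:

kubectl --context auto-20210813002105-679351 apply -f - <<'EOF'
apiVersion: apps/v1
kind: Deployment
metadata:
  name: netcat
spec:
  replicas: 1
  selector:
    matchLabels: {app: netcat}
  template:
    metadata:
      labels: {app: netcat}
    spec:
      containers:
      - name: dnsutils    # container name taken from the Ready conditions above
        image: registry.k8s.io/e2e-test-images/jessie-dnsutils:1.3   # stand-in, not necessarily the test image
        command: ["/bin/sh", "-c", "while true; do nc -l -p 8080; done"]   # placeholder listener on 8080
---
apiVersion: v1
kind: Service
metadata:
  name: netcat            # the HairPin probe dials this Service by name
spec:
  selector: {app: netcat}
  ports:
  - port: 8080
    targetPort: 8080
EOF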

                                                
                                    
TestNetworkPlugins/group/auto/DNS (0.35s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/DNS
net_test.go:162: (dbg) Run:  kubectl --context auto-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/auto/DNS (0.35s)

                                                
                                    
TestNetworkPlugins/group/auto/Localhost (0.3s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/Localhost
net_test.go:181: (dbg) Run:  kubectl --context auto-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/auto/Localhost (0.30s)

                                                
                                    
TestNetworkPlugins/group/auto/HairPin (0.25s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/auto/HairPin
net_test.go:231: (dbg) Run:  kubectl --context auto-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/auto/HairPin (0.25s)
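
The three checks after NetCatPod probe connectivity from inside the netcat pod: DNS resolves kubernetes.default through cluster DNS, Localhost scans 127.0.0.1, and HairPin scans the pod's own Service name, so a passing HairPin means traffic can leave the pod, reach the Service VIP, and loop back to the same pod. The latter two use netcat in zero-I/O scan mode, exactly as logged above:

# -z: scan without sending data; -w 5: connect timeout; -i 5: interval. Exit 0 == port reachable.
kubectl --context auto-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"   # Localhost
kubectl --context auto-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"     # HairPin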

                                                
                                    
TestNetworkPlugins/group/cilium/Start (159.66s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p cilium-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=cilium --driver=kvm2  --container-runtime=containerd

                                                
                                                
=== CONT  TestNetworkPlugins/group/cilium/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p cilium-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=cilium --driver=kvm2  --container-runtime=containerd: (2m39.663645539s)
--- PASS: TestNetworkPlugins/group/cilium/Start (159.66s)

                                                
                                    
TestNetworkPlugins/group/kindnet/ControllerPod (5.04s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/ControllerPod
net_test.go:106: (dbg) TestNetworkPlugins/group/kindnet/ControllerPod: waiting 10m0s for pods matching "app=kindnet" in namespace "kube-system" ...
helpers_test.go:343: "kindnet-wtsj6" [db32ff08-af6c-4a56-903e-3338d7cce9e1] Running
net_test.go:106: (dbg) TestNetworkPlugins/group/kindnet/ControllerPod: app=kindnet healthy within 5.02582929s
--- PASS: TestNetworkPlugins/group/kindnet/ControllerPod (5.04s)
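
The ControllerPod checks poll for a Ready CNI daemon pod by label (app=kindnet here; k8s-app=cilium and app=flannel later in this report). Roughly the same wait, sketched with plain kubectl:

kubectl --context kindnet-20210813002105-679351 -n kube-system \
  wait pod -l app=kindnet --for=condition=Ready --timeout=10m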

                                                
                                    
TestNetworkPlugins/group/kindnet/KubeletFlags (0.26s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p kindnet-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/kindnet/KubeletFlags (0.26s)

                                                
                                    
TestNetworkPlugins/group/kindnet/NetCatPod (10.96s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context kindnet-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/kindnet/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-swx68" [112763f7-8319-4cc2-b797-61dd43f87e9a] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:343: "netcat-66fbc655d5-swx68" [112763f7-8319-4cc2-b797-61dd43f87e9a] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/kindnet/NetCatPod: app=netcat healthy within 10.092470651s
--- PASS: TestNetworkPlugins/group/kindnet/NetCatPod (10.96s)

                                                
                                    
TestNetworkPlugins/group/kindnet/DNS (3.06s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/DNS
net_test.go:162: (dbg) Run:  kubectl --context kindnet-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
net_test.go:162: (dbg) Done: kubectl --context kindnet-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default: (3.057729716s)
--- PASS: TestNetworkPlugins/group/kindnet/DNS (3.06s)

                                                
                                    
TestNetworkPlugins/group/kindnet/Localhost (0.28s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/Localhost
net_test.go:181: (dbg) Run:  kubectl --context kindnet-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/kindnet/Localhost (0.28s)

                                                
                                    
TestNetworkPlugins/group/kindnet/HairPin (0.26s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/kindnet/HairPin
net_test.go:231: (dbg) Run:  kubectl --context kindnet-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/kindnet/HairPin (0.26s)

                                                
                                    
TestNetworkPlugins/group/custom-weave/Start (121.97s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-weave/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p custom-weave-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=testdata/weavenet.yaml --driver=kvm2  --container-runtime=containerd

                                                
                                                
=== CONT  TestNetworkPlugins/group/custom-weave/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p custom-weave-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=testdata/weavenet.yaml --driver=kvm2  --container-runtime=containerd: (2m1.969576726s)
--- PASS: TestNetworkPlugins/group/custom-weave/Start (121.97s)
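
As the start commands in this report show, --cni accepts either a built-in plugin name or a path to a user-supplied manifest; the custom-weave group exercises the latter:

# built-in plugin, selected by name
out/minikube-linux-amd64 start -p demo --cni=flannel --driver=kvm2 --container-runtime=containerd
# arbitrary CNI manifest loaded from disk (the custom-weave case)
out/minikube-linux-amd64 start -p demo --cni=testdata/weavenet.yaml --driver=kvm2 --container-runtime=containerd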

                                                
                                    
TestNetworkPlugins/group/flannel/Start (126.9s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p flannel-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=flannel --driver=kvm2  --container-runtime=containerd

                                                
                                                
=== CONT  TestNetworkPlugins/group/flannel/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p flannel-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=flannel --driver=kvm2  --container-runtime=containerd: (2m6.901432528s)
--- PASS: TestNetworkPlugins/group/flannel/Start (126.90s)

                                                
                                    
TestNetworkPlugins/group/cilium/ControllerPod (5.03s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/ControllerPod
net_test.go:106: (dbg) TestNetworkPlugins/group/cilium/ControllerPod: waiting 10m0s for pods matching "k8s-app=cilium" in namespace "kube-system" ...
helpers_test.go:343: "cilium-xtxnq" [77102042-7407-4466-b061-1aea04e2cf63] Running
net_test.go:106: (dbg) TestNetworkPlugins/group/cilium/ControllerPod: k8s-app=cilium healthy within 5.028820626s
--- PASS: TestNetworkPlugins/group/cilium/ControllerPod (5.03s)

                                                
                                    
TestNetworkPlugins/group/cilium/KubeletFlags (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p cilium-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/cilium/KubeletFlags (0.22s)

                                                
                                    
TestNetworkPlugins/group/cilium/NetCatPod (11.73s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context cilium-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/cilium/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-bq68j" [9b765846-6f2d-433a-a808-7825c207330f] Pending
helpers_test.go:343: "netcat-66fbc655d5-bq68j" [9b765846-6f2d-433a-a808-7825c207330f] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:343: "netcat-66fbc655d5-bq68j" [9b765846-6f2d-433a-a808-7825c207330f] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/cilium/NetCatPod: app=netcat healthy within 11.013539422s
--- PASS: TestNetworkPlugins/group/cilium/NetCatPod (11.73s)

                                                
                                    
TestNetworkPlugins/group/cilium/DNS (0.36s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/DNS
net_test.go:162: (dbg) Run:  kubectl --context cilium-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/cilium/DNS (0.36s)

                                                
                                    
TestNetworkPlugins/group/cilium/Localhost (0.24s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/Localhost
net_test.go:181: (dbg) Run:  kubectl --context cilium-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/cilium/Localhost (0.24s)

                                                
                                    
TestNetworkPlugins/group/cilium/HairPin (0.25s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/cilium/HairPin
net_test.go:231: (dbg) Run:  kubectl --context cilium-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/cilium/HairPin (0.25s)

                                                
                                    
TestNetworkPlugins/group/bridge/Start (125.42s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p bridge-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=bridge --driver=kvm2  --container-runtime=containerd
E0813 00:28:49.240674  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestNetworkPlugins/group/bridge/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p bridge-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --cni=bridge --driver=kvm2  --container-runtime=containerd: (2m5.421955677s)
--- PASS: TestNetworkPlugins/group/bridge/Start (125.42s)

                                                
                                    
TestNetworkPlugins/group/custom-weave/KubeletFlags (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-weave/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p custom-weave-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/custom-weave/KubeletFlags (0.22s)

                                                
                                    
TestNetworkPlugins/group/custom-weave/NetCatPod (11.42s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-weave/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context custom-weave-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/custom-weave/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-dfwrs" [9ecc83dc-8dbb-4c2e-bf07-b4f368c5a7e0] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
helpers_test.go:343: "netcat-66fbc655d5-dfwrs" [9ecc83dc-8dbb-4c2e-bf07-b4f368c5a7e0] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/custom-weave/NetCatPod: app=netcat healthy within 11.010194018s
--- PASS: TestNetworkPlugins/group/custom-weave/NetCatPod (11.42s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/Start (129.53s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/Start
net_test.go:98: (dbg) Run:  out/minikube-linux-amd64 start -p enable-default-cni-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --enable-default-cni=true --driver=kvm2  --container-runtime=containerd

                                                
                                                
=== CONT  TestNetworkPlugins/group/enable-default-cni/Start
net_test.go:98: (dbg) Done: out/minikube-linux-amd64 start -p enable-default-cni-20210813002105-679351 --memory=2048 --alsologtostderr --wait=true --wait-timeout=5m --enable-default-cni=true --driver=kvm2  --container-runtime=containerd: (2m9.533463998s)
--- PASS: TestNetworkPlugins/group/enable-default-cni/Start (129.53s)

                                                
                                    
TestNetworkPlugins/group/flannel/ControllerPod (5.03s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/ControllerPod
net_test.go:106: (dbg) TestNetworkPlugins/group/flannel/ControllerPod: waiting 10m0s for pods matching "app=flannel" in namespace "kube-system" ...
helpers_test.go:343: "kube-flannel-ds-amd64-54t2l" [00ebd7d8-1567-4c78-b42b-052ce84754ca] Running
net_test.go:106: (dbg) TestNetworkPlugins/group/flannel/ControllerPod: app=flannel healthy within 5.026539021s
--- PASS: TestNetworkPlugins/group/flannel/ControllerPod (5.03s)

                                                
                                    
TestNetworkPlugins/group/flannel/KubeletFlags (0.27s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p flannel-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/flannel/KubeletFlags (0.27s)

                                                
                                    
TestNetworkPlugins/group/flannel/NetCatPod (13.51s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context flannel-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/flannel/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-ztrjp" [7c4983a7-d649-458c-8a7f-f502c1a1dccf] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
E0813 00:29:12.516461  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
helpers_test.go:343: "netcat-66fbc655d5-ztrjp" [7c4983a7-d649-458c-8a7f-f502c1a1dccf] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/flannel/NetCatPod: app=netcat healthy within 12.831020738s
--- PASS: TestNetworkPlugins/group/flannel/NetCatPod (13.51s)

                                                
                                    
TestNetworkPlugins/group/flannel/DNS (0.28s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/DNS
net_test.go:162: (dbg) Run:  kubectl --context flannel-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/flannel/DNS (0.28s)

                                                
                                    
TestNetworkPlugins/group/flannel/Localhost (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/Localhost
net_test.go:181: (dbg) Run:  kubectl --context flannel-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/flannel/Localhost (0.21s)

                                                
                                    
TestNetworkPlugins/group/flannel/HairPin (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/flannel/HairPin
net_test.go:231: (dbg) Run:  kubectl --context flannel-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/flannel/HairPin (0.21s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/FirstStart (152.09s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Run:  out/minikube-linux-amd64 start -p old-k8s-version-20210813002926-679351 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.14.0
E0813 00:30:34.202516  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.207849  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.218165  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.239007  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.280194  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.360587  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.520886  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:34.841457  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:35.482503  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:36.762848  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:39.323299  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:30:44.444374  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/old-k8s-version/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Done: out/minikube-linux-amd64 start -p old-k8s-version-20210813002926-679351 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.14.0: (2m32.085607222s)
--- PASS: TestStartStop/group/old-k8s-version/serial/FirstStart (152.09s)
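
The repeated cert_rotation.go:168 lines above (and elsewhere in this report) are client-go certificate-reload warnings: a watcher in the long-running test process is still polling client certificates that belong to profiles earlier tests already deleted (auto-20210813002105-679351 here). They are noise from the shared process, not failures of the test being run, which can be confirmed by checking the path from the warning:

# The watcher polls a cert that the earlier profile deletion removed:
ls /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt
# ls: cannot access '...': No such file or directory   (matching the E0813 text)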

                                                
                                    
TestNetworkPlugins/group/bridge/KubeletFlags (0.22s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p bridge-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/bridge/KubeletFlags (0.22s)

                                                
                                    
TestNetworkPlugins/group/bridge/NetCatPod (9.6s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context bridge-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/bridge/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-s5pj2" [6bc67ea6-ceab-4598-8bad-a25c9f663d19] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
E0813 00:30:54.685098  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
helpers_test.go:343: "netcat-66fbc655d5-s5pj2" [6bc67ea6-ceab-4598-8bad-a25c9f663d19] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/bridge/NetCatPod: app=netcat healthy within 9.009883682s
--- PASS: TestNetworkPlugins/group/bridge/NetCatPod (9.60s)

                                                
                                    
TestNetworkPlugins/group/bridge/DNS (0.25s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/DNS
net_test.go:162: (dbg) Run:  kubectl --context bridge-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/bridge/DNS (0.25s)

                                                
                                    
TestNetworkPlugins/group/bridge/Localhost (0.19s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/Localhost
net_test.go:181: (dbg) Run:  kubectl --context bridge-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/bridge/Localhost (0.19s)

                                                
                                    
TestNetworkPlugins/group/bridge/HairPin (0.21s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/bridge/HairPin
net_test.go:231: (dbg) Run:  kubectl --context bridge-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/bridge/HairPin (0.21s)

                                                
                                    
TestStartStop/group/embed-certs/serial/FirstStart (117.14s)

                                                
                                                
=== RUN   TestStartStop/group/embed-certs/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Run:  out/minikube-linux-amd64 start -p embed-certs-20210813003103-679351 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3
E0813 00:31:09.471657  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/embed-certs/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Done: out/minikube-linux-amd64 start -p embed-certs-20210813003103-679351 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3: (1m57.138048719s)
--- PASS: TestStartStop/group/embed-certs/serial/FirstStart (117.14s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/KubeletFlags (0.24s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/KubeletFlags
net_test.go:119: (dbg) Run:  out/minikube-linux-amd64 ssh -p enable-default-cni-20210813002105-679351 "pgrep -a kubelet"
--- PASS: TestNetworkPlugins/group/enable-default-cni/KubeletFlags (0.24s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/NetCatPod (11.59s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/NetCatPod
net_test.go:131: (dbg) Run:  kubectl --context enable-default-cni-20210813002105-679351 replace --force -f testdata/netcat-deployment.yaml
net_test.go:145: (dbg) TestNetworkPlugins/group/enable-default-cni/NetCatPod: waiting 15m0s for pods matching "app=netcat" in namespace "default" ...
helpers_test.go:343: "netcat-66fbc655d5-q9w8c" [99c7f9d0-515c-451c-83ab-5225e2f234df] Pending / Ready:ContainersNotReady (containers with unready status: [dnsutils]) / ContainersReady:ContainersNotReady (containers with unready status: [dnsutils])
E0813 00:31:15.166082  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
helpers_test.go:343: "netcat-66fbc655d5-q9w8c" [99c7f9d0-515c-451c-83ab-5225e2f234df] Running
net_test.go:145: (dbg) TestNetworkPlugins/group/enable-default-cni/NetCatPod: app=netcat healthy within 11.014778602s
--- PASS: TestNetworkPlugins/group/enable-default-cni/NetCatPod (11.59s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/DNS (0.24s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/DNS
net_test.go:162: (dbg) Run:  kubectl --context enable-default-cni-20210813002105-679351 exec deployment/netcat -- nslookup kubernetes.default
--- PASS: TestNetworkPlugins/group/enable-default-cni/DNS (0.24s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/Localhost (0.19s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/Localhost
net_test.go:181: (dbg) Run:  kubectl --context enable-default-cni-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z localhost 8080"
--- PASS: TestNetworkPlugins/group/enable-default-cni/Localhost (0.19s)

                                                
                                    
TestNetworkPlugins/group/enable-default-cni/HairPin (0.19s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/enable-default-cni/HairPin
net_test.go:231: (dbg) Run:  kubectl --context enable-default-cni-20210813002105-679351 exec deployment/netcat -- /bin/sh -c "nc -w 5 -i 5 -z netcat 8080"
--- PASS: TestNetworkPlugins/group/enable-default-cni/HairPin (0.19s)

                                                
                                    
TestStartStop/group/no-preload/serial/FirstStart (142.96s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Run:  out/minikube-linux-amd64 start -p no-preload-20210813003127-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0
E0813 00:31:28.780365  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:28.787287  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:28.797558  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:28.817877  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:28.858174  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:28.939161  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:29.100063  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:29.421039  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:30.065658  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:31.346168  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:33.906619  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:39.027256  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:49.893804  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:31:56.126713  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/no-preload/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Done: out/minikube-linux-amd64 start -p no-preload-20210813003127-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0: (2m22.964476723s)
--- PASS: TestStartStop/group/no-preload/serial/FirstStart (142.96s)
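
--preload=false makes minikube skip the preloaded image tarball, so every image is pulled individually at start; that fits a release candidate such as v1.22.0-rc.0 (presumably no preload is published for it) and accounts for this FirstStart being slower than the preloaded ones above:

out/minikube-linux-amd64 start -p demo --memory=2200 --preload=false \
  --driver=kvm2 --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0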

                                                
                                    
TestStartStop/group/old-k8s-version/serial/DeployApp (10.82s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/DeployApp
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context old-k8s-version-20210813002926-679351 create -f testdata/busybox.yaml
start_stop_delete_test.go:169: (dbg) Done: kubectl --context old-k8s-version-20210813002926-679351 create -f testdata/busybox.yaml: (1.542591895s)
start_stop_delete_test.go:169: (dbg) TestStartStop/group/old-k8s-version/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:343: "busybox" [df743d47-fbcd-11eb-a0de-525400670fb0] Pending
helpers_test.go:343: "busybox" [df743d47-fbcd-11eb-a0de-525400670fb0] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:343: "busybox" [df743d47-fbcd-11eb-a0de-525400670fb0] Running
start_stop_delete_test.go:169: (dbg) TestStartStop/group/old-k8s-version/serial/DeployApp: integration-test=busybox healthy within 9.063747888s
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context old-k8s-version-20210813002926-679351 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/old-k8s-version/serial/DeployApp (10.82s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive (1.31s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive
start_stop_delete_test.go:178: (dbg) Run:  out/minikube-linux-amd64 addons enable metrics-server -p old-k8s-version-20210813002926-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain
E0813 00:32:10.374916  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
start_stop_delete_test.go:178: (dbg) Done: out/minikube-linux-amd64 addons enable metrics-server -p old-k8s-version-20210813002926-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.184712336s)
start_stop_delete_test.go:188: (dbg) Run:  kubectl --context old-k8s-version-20210813002926-679351 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/old-k8s-version/serial/EnableAddonWhileActive (1.31s)
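
The EnableAddonWhileActive steps point the metrics-server addon at a placeholder registry (fake.domain) through per-addon overrides, then describe the Deployment, presumably to verify that the override landed. The override syntax is Component=value:

out/minikube-linux-amd64 addons enable metrics-server -p demo \
  --images=MetricsServer=k8s.gcr.io/echoserver:1.4 \
  --registries=MetricsServer=fake.domain
# inspect the resulting image reference in the addon's Deployment
kubectl --context demo -n kube-system describe deploy/metrics-server | grep -i image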

                                                
                                    
TestStartStop/group/old-k8s-version/serial/Stop (92.55s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/Stop
start_stop_delete_test.go:201: (dbg) Run:  out/minikube-linux-amd64 stop -p old-k8s-version-20210813002926-679351 --alsologtostderr -v=3
E0813 00:32:51.335299  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/old-k8s-version/serial/Stop
start_stop_delete_test.go:201: (dbg) Done: out/minikube-linux-amd64 stop -p old-k8s-version-20210813002926-679351 --alsologtostderr -v=3: (1m32.54655366s)
--- PASS: TestStartStop/group/old-k8s-version/serial/Stop (92.55s)

                                                
                                    
TestStartStop/group/embed-certs/serial/DeployApp (8.62s)

                                                
                                                
=== RUN   TestStartStop/group/embed-certs/serial/DeployApp
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context embed-certs-20210813003103-679351 create -f testdata/busybox.yaml
start_stop_delete_test.go:169: (dbg) TestStartStop/group/embed-certs/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:343: "busybox" [d954e181-432b-4305-9826-a6f0cfce237d] Pending
helpers_test.go:343: "busybox" [d954e181-432b-4305-9826-a6f0cfce237d] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:343: "busybox" [d954e181-432b-4305-9826-a6f0cfce237d] Running
start_stop_delete_test.go:169: (dbg) TestStartStop/group/embed-certs/serial/DeployApp: integration-test=busybox healthy within 8.026716672s
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context embed-certs-20210813003103-679351 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/embed-certs/serial/DeployApp (8.62s)

                                                
                                    
TestStartStop/group/embed-certs/serial/EnableAddonWhileActive (1.04s)

                                                
                                                
=== RUN   TestStartStop/group/embed-certs/serial/EnableAddonWhileActive
start_stop_delete_test.go:178: (dbg) Run:  out/minikube-linux-amd64 addons enable metrics-server -p embed-certs-20210813003103-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:188: (dbg) Run:  kubectl --context embed-certs-20210813003103-679351 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/embed-certs/serial/EnableAddonWhileActive (1.04s)

                                                
                                    
TestStartStop/group/embed-certs/serial/Stop (92.64s)

                                                
                                                
=== RUN   TestStartStop/group/embed-certs/serial/Stop
start_stop_delete_test.go:201: (dbg) Run:  out/minikube-linux-amd64 stop -p embed-certs-20210813003103-679351 --alsologtostderr -v=3
E0813 00:33:18.047688  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.345559  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.350834  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.361088  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.381600  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.421871  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.502180  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.663303  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:27.983900  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:28.624948  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:29.905916  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:32.466991  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:37.588108  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/embed-certs/serial/Stop
start_stop_delete_test.go:201: (dbg) Done: out/minikube-linux-amd64 stop -p embed-certs-20210813003103-679351 --alsologtostderr -v=3: (1m32.644736738s)
--- PASS: TestStartStop/group/embed-certs/serial/Stop (92.64s)

                                                
                                    
TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop (0.17s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop
start_stop_delete_test.go:212: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
start_stop_delete_test.go:212: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351: exit status 7 (73.423865ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
start_stop_delete_test.go:212: status error: exit status 7 (may be ok)
start_stop_delete_test.go:219: (dbg) Run:  out/minikube-linux-amd64 addons enable dashboard -p old-k8s-version-20210813002926-679351 --images=MetricsScraper=k8s.gcr.io/echoserver:1.4
--- PASS: TestStartStop/group/old-k8s-version/serial/EnableAddonAfterStop (0.17s)
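
minikube status reports state through distinct non-zero exit codes rather than a single failure value; the exit status 7 above appears to encode a fully stopped profile (host, cluster, and kubelet all down), which this test explicitly tolerates ("may be ok"). A sketch of scripting around it:

out/minikube-linux-amd64 status --format={{.Host}} -p old-k8s-version-20210813002926-679351
case $? in
  0) echo "running" ;;
  7) echo "stopped, as expected after the Stop step" ;;
  *) echo "unexpected status" ;;
esac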

                                                
                                    
TestStartStop/group/old-k8s-version/serial/SecondStart (540.93s)

                                                
                                                
=== RUN   TestStartStop/group/old-k8s-version/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Run:  out/minikube-linux-amd64 start -p old-k8s-version-20210813002926-679351 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.14.0
E0813 00:33:47.828337  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:49.239943  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory

                                                
                                                
=== CONT  TestStartStop/group/old-k8s-version/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Done: out/minikube-linux-amd64 start -p old-k8s-version-20210813002926-679351 --memory=2200 --alsologtostderr --wait=true --kvm-network=default --kvm-qemu-uri=qemu:///system --disable-driver-mounts --keep-context=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.14.0: (9m0.656594817s)
start_stop_delete_test.go:235: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
--- PASS: TestStartStop/group/old-k8s-version/serial/SecondStart (540.93s)

                                                
                                    
TestStartStop/group/no-preload/serial/DeployApp (9.65s)

                                                
                                                
=== RUN   TestStartStop/group/no-preload/serial/DeployApp
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context no-preload-20210813003127-679351 create -f testdata/busybox.yaml
start_stop_delete_test.go:169: (dbg) TestStartStop/group/no-preload/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:343: "busybox" [27da7e0d-0ba5-4e5b-89f8-01a13dc50d55] Pending
helpers_test.go:343: "busybox" [27da7e0d-0ba5-4e5b-89f8-01a13dc50d55] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
E0813 00:33:52.256864  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.262274  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.272522  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.292787  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.333053  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.413579  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.574016  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:52.894983  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:53.535893  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
helpers_test.go:343: "busybox" [27da7e0d-0ba5-4e5b-89f8-01a13dc50d55] Running
E0813 00:33:54.817041  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:33:57.377842  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
start_stop_delete_test.go:169: (dbg) TestStartStop/group/no-preload/serial/DeployApp: integration-test=busybox healthy within 9.042305279s
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context no-preload-20210813003127-679351 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/no-preload/serial/DeployApp (9.65s)
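
The wait that DeployApp performs above (create busybox.yaml, then poll until the labeled pod is Running) can be reproduced outside the harness. A minimal sketch in Go, assuming kubectl is on PATH; waitForLabeledPod and the 2s interval are illustrative, not the harness's actual helpers:

package main

import (
	"fmt"
	"os/exec"
	"strings"
	"time"
)

// waitForLabeledPod polls "kubectl get pods" until a pod matching the
// selector reports phase Running, mirroring the 8m0s wait logged above.
func waitForLabeledPod(kubeContext, selector string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		// jsonpath prints every matching pod's phase, space-separated.
		out, err := exec.Command("kubectl", "--context", kubeContext,
			"get", "pods", "-n", "default", "-l", selector,
			"-o", "jsonpath={.items[*].status.phase}").Output()
		if err == nil && strings.Contains(string(out), "Running") {
			return nil
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("pods matching %q not Running within %v", selector, timeout)
}

func main() {
	err := waitForLabeledPod("no-preload-20210813003127-679351",
		"integration-test=busybox", 8*time.Minute)
	if err != nil {
		fmt.Println(err)
	}
}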

TestStartStop/group/no-preload/serial/EnableAddonWhileActive (1.18s)

=== RUN   TestStartStop/group/no-preload/serial/EnableAddonWhileActive
start_stop_delete_test.go:178: (dbg) Run:  out/minikube-linux-amd64 addons enable metrics-server -p no-preload-20210813003127-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:178: (dbg) Done: out/minikube-linux-amd64 addons enable metrics-server -p no-preload-20210813003127-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.064609928s)
start_stop_delete_test.go:188: (dbg) Run:  kubectl --context no-preload-20210813003127-679351 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/no-preload/serial/EnableAddonWhileActive (1.18s)
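
The describe call above is where the test checks that the --images/--registries overrides landed on the deployment. A rough stand-alone equivalent that reads the container image directly; the fake.domain/ prefix check is an assumption derived from the flags, not taken from this log:

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	// Read the image the metrics-server deployment actually ended up with.
	out, err := exec.Command("kubectl", "--context", "no-preload-20210813003127-679351",
		"-n", "kube-system", "get", "deploy", "metrics-server",
		"-o", "jsonpath={.spec.template.spec.containers[0].image}").Output()
	if err != nil {
		log.Fatal(err)
	}
	image := string(out)
	// With --registries=MetricsServer=fake.domain the image should carry
	// the fake registry prefix (assumed format).
	if !strings.HasPrefix(image, "fake.domain/") {
		log.Fatalf("registry override not applied: %s", image)
	}
	fmt.Println("metrics-server image:", image)
}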

TestStartStop/group/no-preload/serial/Stop (92.66s)

=== RUN   TestStartStop/group/no-preload/serial/Stop
start_stop_delete_test.go:201: (dbg) Run:  out/minikube-linux-amd64 stop -p no-preload-20210813003127-679351 --alsologtostderr -v=3
E0813 00:34:02.498229  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.150284  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.155612  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.165837  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.186143  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.226439  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.306805  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.467263  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:06.787978  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:07.428728  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:08.309412  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:08.709412  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:11.269719  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:12.739141  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:13.255827  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:16.390670  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:26.631646  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:36.376214  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory

=== CONT  TestStartStop/group/no-preload/serial/Stop
start_stop_delete_test.go:201: (dbg) Done: out/minikube-linux-amd64 stop -p no-preload-20210813003127-679351 --alsologtostderr -v=3: (1m32.66288526s)
--- PASS: TestStartStop/group/no-preload/serial/Stop (92.66s)

TestStartStop/group/embed-certs/serial/EnableAddonAfterStop (0.16s)

=== RUN   TestStartStop/group/embed-certs/serial/EnableAddonAfterStop
start_stop_delete_test.go:212: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
start_stop_delete_test.go:212: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351: exit status 7 (70.859183ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:212: status error: exit status 7 (may be ok)
start_stop_delete_test.go:219: (dbg) Run:  out/minikube-linux-amd64 addons enable dashboard -p embed-certs-20210813003103-679351 --images=MetricsScraper=k8s.gcr.io/echoserver:1.4
--- PASS: TestStartStop/group/embed-certs/serial/EnableAddonAfterStop (0.16s)
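
EnableAddonAfterStop expects status to fail here, since the host is stopped, and logs "exit status 7 (may be ok)" instead of aborting. A sketch of that tolerate-the-exit-code pattern using os/exec; hostState is a made-up name:

package main

import (
	"errors"
	"fmt"
	"os/exec"
)

// hostState runs "minikube status --format={{.Host}}" and tolerates the
// non-zero exit code a stopped cluster produces, as the test does.
func hostState(profile string) (string, error) {
	out, err := exec.Command("out/minikube-linux-amd64", "status",
		"--format={{.Host}}", "-p", profile, "-n", profile).Output()
	var ee *exec.ExitError
	if errors.As(err, &ee) {
		// Stdout still carries "Stopped"; log and carry on ("may be ok").
		fmt.Printf("status error: exit status %d (may be ok)\n", ee.ExitCode())
		return string(out), nil
	}
	return string(out), err
}

func main() {
	state, err := hostState("embed-certs-20210813003103-679351")
	if err != nil {
		panic(err)
	}
	fmt.Println("host:", state)
}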

TestStartStop/group/embed-certs/serial/SecondStart (439.33s)

=== RUN   TestStartStop/group/embed-certs/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Run:  out/minikube-linux-amd64 start -p embed-certs-20210813003103-679351 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3
E0813 00:34:47.112480  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:34:49.270515  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:17.337393  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:28.073007  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory

=== CONT  TestStartStop/group/embed-certs/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Done: out/minikube-linux-amd64 start -p embed-certs-20210813003103-679351 --memory=2200 --alsologtostderr --wait=true --embed-certs --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3: (7m18.908014263s)
start_stop_delete_test.go:235: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
--- PASS: TestStartStop/group/embed-certs/serial/SecondStart (439.33s)

TestStartStop/group/default-k8s-different-port/serial/FirstStart (112.48s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Run:  out/minikube-linux-amd64 start -p default-k8s-different-port-20210813003533-679351 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3

=== CONT  TestStartStop/group/default-k8s-different-port/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Done: out/minikube-linux-amd64 start -p default-k8s-different-port-20210813003533-679351 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3: (1m52.481434231s)
--- PASS: TestStartStop/group/default-k8s-different-port/serial/FirstStart (112.48s)
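
FirstStart passes --apiserver-port=8444 for this profile. One way to confirm the non-default port took effect is to read the server URL from the kubeconfig entry minikube wrote; a sketch, where the :8444 suffix check assumes the usual https://<ip>:<port> URL shape:

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	// --minify keeps only the selected context's cluster entry.
	out, err := exec.Command("kubectl",
		"--context", "default-k8s-different-port-20210813003533-679351",
		"config", "view", "--minify",
		"-o", "jsonpath={.clusters[0].cluster.server}").Output()
	if err != nil {
		log.Fatal(err)
	}
	server := strings.TrimSpace(string(out)) // e.g. https://<node-ip>:8444
	if !strings.HasSuffix(server, ":8444") {
		log.Fatalf("expected API server on port 8444, got %s", server)
	}
	fmt.Println("API server:", server)
}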

TestStartStop/group/no-preload/serial/EnableAddonAfterStop (0.16s)

=== RUN   TestStartStop/group/no-preload/serial/EnableAddonAfterStop
start_stop_delete_test.go:212: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
start_stop_delete_test.go:212: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351: exit status 7 (69.389549ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:212: status error: exit status 7 (may be ok)
start_stop_delete_test.go:219: (dbg) Run:  out/minikube-linux-amd64 addons enable dashboard -p no-preload-20210813003127-679351 --images=MetricsScraper=k8s.gcr.io/echoserver:1.4
E0813 00:35:34.202854  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
--- PASS: TestStartStop/group/no-preload/serial/EnableAddonAfterStop (0.16s)

TestStartStop/group/no-preload/serial/SecondStart (356.73s)

=== RUN   TestStartStop/group/no-preload/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Run:  out/minikube-linux-amd64 start -p no-preload-20210813003127-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0
E0813 00:35:52.716731  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:52.722074  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:52.732446  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:52.752764  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:52.793256  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:52.874390  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:53.034838  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:53.355824  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:53.996949  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:55.277653  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:35:57.838750  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:01.888280  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:02.959825  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:09.471689  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:36:11.191636  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:13.200169  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.768446  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.773764  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.784031  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.804272  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.844622  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:14.924981  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:15.085760  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:15.406356  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:16.046651  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:17.326896  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:19.887150  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:25.007428  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:28.780421  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:33.680963  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:35.248679  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:39.258081  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:49.993847  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:55.729867  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:36:59.738120  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory
E0813 00:37:14.642099  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory

=== CONT  TestStartStop/group/no-preload/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Done: out/minikube-linux-amd64 start -p no-preload-20210813003127-679351 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0: (5m56.321512424s)
start_stop_delete_test.go:235: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
--- PASS: TestStartStop/group/no-preload/serial/SecondStart (356.73s)

TestStartStop/group/default-k8s-different-port/serial/DeployApp (8.65s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/DeployApp
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context default-k8s-different-port-20210813003533-679351 create -f testdata/busybox.yaml
start_stop_delete_test.go:169: (dbg) TestStartStop/group/default-k8s-different-port/serial/DeployApp: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:343: "busybox" [58140cfb-c4af-446f-9c97-62887da87156] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:343: "busybox" [58140cfb-c4af-446f-9c97-62887da87156] Running
start_stop_delete_test.go:169: (dbg) TestStartStop/group/default-k8s-different-port/serial/DeployApp: integration-test=busybox healthy within 8.039933215s
start_stop_delete_test.go:169: (dbg) Run:  kubectl --context default-k8s-different-port-20210813003533-679351 exec busybox -- /bin/sh -c "ulimit -n"
--- PASS: TestStartStop/group/default-k8s-different-port/serial/DeployApp (8.65s)

TestStartStop/group/default-k8s-different-port/serial/EnableAddonWhileActive (1.07s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/EnableAddonWhileActive
start_stop_delete_test.go:178: (dbg) Run:  out/minikube-linux-amd64 addons enable metrics-server -p default-k8s-different-port-20210813003533-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:188: (dbg) Run:  kubectl --context default-k8s-different-port-20210813003533-679351 describe deploy/metrics-server -n kube-system
--- PASS: TestStartStop/group/default-k8s-different-port/serial/EnableAddonWhileActive (1.07s)

TestStartStop/group/default-k8s-different-port/serial/Stop (92.52s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/Stop
start_stop_delete_test.go:201: (dbg) Run:  out/minikube-linux-amd64 stop -p default-k8s-different-port-20210813003533-679351 --alsologtostderr -v=3
E0813 00:37:36.690641  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:38:27.346236  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:38:33.178651  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0813 00:38:36.563077  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:38:49.239626  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0813 00:38:52.256967  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:38:55.032064  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:38:58.611116  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:39:06.151029  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
start_stop_delete_test.go:201: (dbg) Done: out/minikube-linux-amd64 stop -p default-k8s-different-port-20210813003533-679351 --alsologtostderr -v=3: (1m32.515079741s)
--- PASS: TestStartStop/group/default-k8s-different-port/serial/Stop (92.52s)

TestStartStop/group/default-k8s-different-port/serial/EnableAddonAfterStop (0.16s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/EnableAddonAfterStop
start_stop_delete_test.go:212: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
start_stop_delete_test.go:212: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351: exit status 7 (71.580029ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:212: status error: exit status 7 (may be ok)
start_stop_delete_test.go:219: (dbg) Run:  out/minikube-linux-amd64 addons enable dashboard -p default-k8s-different-port-20210813003533-679351 --images=MetricsScraper=k8s.gcr.io/echoserver:1.4
--- PASS: TestStartStop/group/default-k8s-different-port/serial/EnableAddonAfterStop (0.16s)

TestStartStop/group/default-k8s-different-port/serial/SecondStart (452.72s)

=== RUN   TestStartStop/group/default-k8s-different-port/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Run:  out/minikube-linux-amd64 start -p default-k8s-different-port-20210813003533-679351 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3
E0813 00:39:23.099013  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:39:33.834802  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:40:34.202925  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/auto-20210813002105-679351/client.crt: no such file or directory
E0813 00:40:52.716762  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:41:09.471283  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/functional-20210812234826-679351/client.crt: no such file or directory
E0813 00:41:14.768707  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
E0813 00:41:20.403479  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/bridge-20210813002105-679351/client.crt: no such file or directory
E0813 00:41:28.780433  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/kindnet-20210813002105-679351/client.crt: no such file or directory

=== CONT  TestStartStop/group/default-k8s-different-port/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Done: out/minikube-linux-amd64 start -p default-k8s-different-port-20210813003533-679351 --memory=2200 --alsologtostderr --wait=true --apiserver-port=8444 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.21.3: (7m32.468367945s)
start_stop_delete_test.go:235: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
--- PASS: TestStartStop/group/default-k8s-different-port/serial/SecondStart (452.72s)

TestStartStop/group/no-preload/serial/UserAppExistsAfterStop (14.03s)

=== RUN   TestStartStop/group/no-preload/serial/UserAppExistsAfterStop
start_stop_delete_test.go:247: (dbg) TestStartStop/group/no-preload/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-kz6vz" [f1e40ee1-7776-4f7a-a606-284e04907933] Pending / Ready:ContainersNotReady (containers with unready status: [kubernetes-dashboard]) / ContainersReady:ContainersNotReady (containers with unready status: [kubernetes-dashboard])
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-kz6vz" [f1e40ee1-7776-4f7a-a606-284e04907933] Running
E0813 00:41:42.452211  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/enable-default-cni-20210813002105-679351/client.crt: no such file or directory
start_stop_delete_test.go:247: (dbg) TestStartStop/group/no-preload/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 14.028550364s
--- PASS: TestStartStop/group/no-preload/serial/UserAppExistsAfterStop (14.03s)

TestStartStop/group/no-preload/serial/AddonExistsAfterStop (5.11s)

=== RUN   TestStartStop/group/no-preload/serial/AddonExistsAfterStop
start_stop_delete_test.go:260: (dbg) TestStartStop/group/no-preload/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-kz6vz" [f1e40ee1-7776-4f7a-a606-284e04907933] Running
start_stop_delete_test.go:260: (dbg) TestStartStop/group/no-preload/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.009825626s
start_stop_delete_test.go:264: (dbg) Run:  kubectl --context no-preload-20210813003127-679351 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/no-preload/serial/AddonExistsAfterStop (5.11s)

TestStartStop/group/no-preload/serial/VerifyKubernetesImages (0.25s)

=== RUN   TestStartStop/group/no-preload/serial/VerifyKubernetesImages
start_stop_delete_test.go:277: (dbg) Run:  out/minikube-linux-amd64 ssh -p no-preload-20210813003127-679351 "sudo crictl images -o json"
start_stop_delete_test.go:277: Found non-minikube image: library/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/no-preload/serial/VerifyKubernetesImages (0.25s)
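
VerifyKubernetesImages works from the "sudo crictl images -o json" dump fetched above. A sketch of the parsing side, assuming crictl's usual {"images":[{"repoTags":[...]}]} layout; the registry allow-list is illustrative, the real test diffs against an expected image set:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os/exec"
	"strings"
)

// imageList mirrors the fields of "crictl images -o json" that matter here.
type imageList struct {
	Images []struct {
		RepoTags []string `json:"repoTags"`
	} `json:"images"`
}

func main() {
	out, err := exec.Command("out/minikube-linux-amd64", "ssh",
		"-p", "no-preload-20210813003127-679351",
		"sudo crictl images -o json").Output()
	if err != nil {
		log.Fatal(err)
	}
	var list imageList
	if err := json.Unmarshal(out, &list); err != nil {
		log.Fatal(err)
	}
	for _, img := range list.Images {
		for _, tag := range img.RepoTags {
			// Anything outside the stock registries counts as "non-minikube".
			if !strings.HasPrefix(tag, "k8s.gcr.io/") && !strings.HasPrefix(tag, "docker.io/kubernetesui/") {
				fmt.Println("Found non-minikube image:", tag)
			}
		}
	}
}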

TestStartStop/group/no-preload/serial/Pause (2.75s)

=== RUN   TestStartStop/group/no-preload/serial/Pause
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 pause -p no-preload-20210813003127-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351: exit status 2 (255.948748ms)

-- stdout --
	Paused
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Kubelet}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351: exit status 2 (254.004096ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 unpause -p no-preload-20210813003127-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p no-preload-20210813003127-679351 -n no-preload-20210813003127-679351
--- PASS: TestStartStop/group/no-preload/serial/Pause (2.75s)
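
The --format={{.APIServer}} and --format={{.Kubelet}} probes above render minikube's status through a Go template: after pause the apiserver reports Paused while the kubelet reports Stopped, so both exit-status-2 results are accepted. A stand-in sketch; the struct fields are assumed to mirror the template keys:

package main

import (
	"os"
	"text/template"
)

// status stands in for the struct minikube renders with --format.
type status struct {
	Host, Kubelet, APIServer string
}

func main() {
	// Values are illustrative of a paused cluster, matching the log above.
	st := status{Host: "Running", Kubelet: "Stopped", APIServer: "Paused"}
	for _, f := range []string{"{{.APIServer}}", "{{.Kubelet}}"} {
		tmpl := template.Must(template.New("status").Parse(f + "\n"))
		if err := tmpl.Execute(os.Stdout, st); err != nil {
			panic(err)
		}
	}
}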

TestStartStop/group/newest-cni/serial/FirstStart (86.32s)

=== RUN   TestStartStop/group/newest-cni/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Run:  out/minikube-linux-amd64 start -p newest-cni-20210813004154-679351 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubelet.network-plugin=cni --extra-config=kubeadm.pod-network-cidr=192.168.111.111/16 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0

=== CONT  TestStartStop/group/newest-cni/serial/FirstStart
start_stop_delete_test.go:159: (dbg) Done: out/minikube-linux-amd64 start -p newest-cni-20210813004154-679351 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubelet.network-plugin=cni --extra-config=kubeadm.pod-network-cidr=192.168.111.111/16 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0: (1m26.323829458s)
--- PASS: TestStartStop/group/newest-cni/serial/FirstStart (86.32s)

TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop (6.03s)

=== RUN   TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop
start_stop_delete_test.go:247: (dbg) TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-8szr7" [562c34d8-adf4-42a5-bd59-8d1e11e2638c] Pending / Ready:ContainersNotReady (containers with unready status: [kubernetes-dashboard]) / ContainersReady:ContainersNotReady (containers with unready status: [kubernetes-dashboard])
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-8szr7" [562c34d8-adf4-42a5-bd59-8d1e11e2638c] Running
start_stop_delete_test.go:247: (dbg) TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 6.026469463s
--- PASS: TestStartStop/group/embed-certs/serial/UserAppExistsAfterStop (6.03s)

TestStartStop/group/embed-certs/serial/AddonExistsAfterStop (5.1s)

=== RUN   TestStartStop/group/embed-certs/serial/AddonExistsAfterStop
start_stop_delete_test.go:260: (dbg) TestStartStop/group/embed-certs/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-8szr7" [562c34d8-adf4-42a5-bd59-8d1e11e2638c] Running
start_stop_delete_test.go:260: (dbg) TestStartStop/group/embed-certs/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.009787658s
start_stop_delete_test.go:264: (dbg) Run:  kubectl --context embed-certs-20210813003103-679351 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/embed-certs/serial/AddonExistsAfterStop (5.10s)

TestStartStop/group/embed-certs/serial/VerifyKubernetesImages (0.24s)

=== RUN   TestStartStop/group/embed-certs/serial/VerifyKubernetesImages
start_stop_delete_test.go:277: (dbg) Run:  out/minikube-linux-amd64 ssh -p embed-certs-20210813003103-679351 "sudo crictl images -o json"
start_stop_delete_test.go:277: Found non-minikube image: kindest/kindnetd:v20210326-1e038dc5
start_stop_delete_test.go:277: Found non-minikube image: library/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/embed-certs/serial/VerifyKubernetesImages (0.24s)

TestStartStop/group/embed-certs/serial/Pause (2.61s)

=== RUN   TestStartStop/group/embed-certs/serial/Pause
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 pause -p embed-certs-20210813003103-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351: exit status 2 (247.609057ms)

-- stdout --
	Paused
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Kubelet}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351: exit status 2 (250.046555ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 unpause -p embed-certs-20210813003103-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p embed-certs-20210813003103-679351 -n embed-certs-20210813003103-679351
--- PASS: TestStartStop/group/embed-certs/serial/Pause (2.61s)

TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop (5.02s)

=== RUN   TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop
start_stop_delete_test.go:247: (dbg) TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-5d8978d65d-7fsd4" [341ce641-fbcf-11eb-aa68-525400670fb0] Running
start_stop_delete_test.go:247: (dbg) TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.016433453s
--- PASS: TestStartStop/group/old-k8s-version/serial/UserAppExistsAfterStop (5.02s)

TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop (5.26s)

=== RUN   TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop
start_stop_delete_test.go:260: (dbg) TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-5d8978d65d-7fsd4" [341ce641-fbcf-11eb-aa68-525400670fb0] Running
start_stop_delete_test.go:260: (dbg) TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.009533629s
start_stop_delete_test.go:264: (dbg) Run:  kubectl --context old-k8s-version-20210813002926-679351 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/old-k8s-version/serial/AddonExistsAfterStop (5.26s)

TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages (0.31s)

=== RUN   TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages
start_stop_delete_test.go:277: (dbg) Run:  out/minikube-linux-amd64 ssh -p old-k8s-version-20210813002926-679351 "sudo crictl images -o json"
start_stop_delete_test.go:277: Found non-minikube image: kindest/kindnetd:v20210326-1e038dc5
start_stop_delete_test.go:277: Found non-minikube image: library/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/old-k8s-version/serial/VerifyKubernetesImages (0.31s)

TestStartStop/group/old-k8s-version/serial/Pause (2.92s)

=== RUN   TestStartStop/group/old-k8s-version/serial/Pause
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 pause -p old-k8s-version-20210813002926-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351: exit status 2 (267.120925ms)

-- stdout --
	Paused
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Kubelet}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351: exit status 2 (263.458453ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 unpause -p old-k8s-version-20210813002926-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p old-k8s-version-20210813002926-679351 -n old-k8s-version-20210813002926-679351
--- PASS: TestStartStop/group/old-k8s-version/serial/Pause (2.92s)

TestStartStop/group/newest-cni/serial/DeployApp (0s)

=== RUN   TestStartStop/group/newest-cni/serial/DeployApp
--- PASS: TestStartStop/group/newest-cni/serial/DeployApp (0.00s)

TestStartStop/group/newest-cni/serial/EnableAddonWhileActive (1.04s)

=== RUN   TestStartStop/group/newest-cni/serial/EnableAddonWhileActive
start_stop_delete_test.go:178: (dbg) Run:  out/minikube-linux-amd64 addons enable metrics-server -p newest-cni-20210813004154-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain
start_stop_delete_test.go:178: (dbg) Done: out/minikube-linux-amd64 addons enable metrics-server -p newest-cni-20210813004154-679351 --images=MetricsServer=k8s.gcr.io/echoserver:1.4 --registries=MetricsServer=fake.domain: (1.042172596s)
start_stop_delete_test.go:184: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/EnableAddonWhileActive (1.04s)

TestStartStop/group/newest-cni/serial/Stop (5.11s)

=== RUN   TestStartStop/group/newest-cni/serial/Stop
start_stop_delete_test.go:201: (dbg) Run:  out/minikube-linux-amd64 stop -p newest-cni-20210813004154-679351 --alsologtostderr -v=3
start_stop_delete_test.go:201: (dbg) Done: out/minikube-linux-amd64 stop -p newest-cni-20210813004154-679351 --alsologtostderr -v=3: (5.107320031s)
--- PASS: TestStartStop/group/newest-cni/serial/Stop (5.11s)

TestStartStop/group/newest-cni/serial/EnableAddonAfterStop (0.15s)

=== RUN   TestStartStop/group/newest-cni/serial/EnableAddonAfterStop
start_stop_delete_test.go:212: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
start_stop_delete_test.go:212: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Host}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351: exit status 7 (67.837613ms)

-- stdout --
	Stopped
-- /stdout --
start_stop_delete_test.go:212: status error: exit status 7 (may be ok)
start_stop_delete_test.go:219: (dbg) Run:  out/minikube-linux-amd64 addons enable dashboard -p newest-cni-20210813004154-679351 --images=MetricsScraper=k8s.gcr.io/echoserver:1.4
--- PASS: TestStartStop/group/newest-cni/serial/EnableAddonAfterStop (0.15s)
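Note: minikube status reports cluster state through its exit code as well as its output, so a stopped host returns a non-zero code (7 in the run above) even when nothing is wrong; the test logs this as "may be ok". A hedged sketch of scripting around that behavior, assuming a profile named my-profile:

	# Exit code 0 means the host is Running; non-zero codes encode other
	# states (this log showed exit 7 together with "Stopped").
	if out/minikube-linux-amd64 status --format='{{.Host}}' -p my-profile; then
		echo "host running"
	else
		echo "host not running (exit $?) - may be expected right after a stop"
	fi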

TestStartStop/group/newest-cni/serial/SecondStart (114.31s)
=== RUN   TestStartStop/group/newest-cni/serial/SecondStart
start_stop_delete_test.go:229: (dbg) Run:  out/minikube-linux-amd64 start -p newest-cni-20210813004154-679351 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubelet.network-plugin=cni --extra-config=kubeadm.pod-network-cidr=192.168.111.111/16 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0
E0813 00:43:27.346496  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/cilium-20210813002105-679351/client.crt: no such file or directory
E0813 00:43:49.240633  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/addons-20210812234043-679351/client.crt: no such file or directory
E0813 00:43:50.977352  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:50.982690  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:50.992989  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:51.013315  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:51.053654  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:51.133936  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:51.294348  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:51.614937  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:52.255853  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:52.256993  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/custom-weave-20210813002105-679351/client.crt: no such file or directory
E0813 00:43:53.536096  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:43:56.096533  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:44:01.217639  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:44:06.150215  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/flannel-20210813002105-679351/client.crt: no such file or directory
E0813 00:44:11.458040  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:44:31.938459  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
E0813 00:45:12.899517  679351 cert_rotation.go:168] key failed with : open /home/jenkins/minikube-integration/linux-amd64-kvm2-containerd-12230-675919-1c76ff5cea01605c2d985c010644edf1e689d34b/.minikube/profiles/no-preload-20210813003127-679351/client.crt: no such file or directory
start_stop_delete_test.go:229: (dbg) Done: out/minikube-linux-amd64 start -p newest-cni-20210813004154-679351 --memory=2200 --alsologtostderr --wait=apiserver,system_pods,default_sa --feature-gates ServerSideApply=true --network-plugin=cni --extra-config=kubelet.network-plugin=cni --extra-config=kubeadm.pod-network-cidr=192.168.111.111/16 --driver=kvm2  --container-runtime=containerd --kubernetes-version=v1.22.0-rc.0: (1m54.049171161s)
start_stop_delete_test.go:235: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Host}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
--- PASS: TestStartStop/group/newest-cni/serial/SecondStart (114.31s)
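Note: the E0813 cert_rotation.go lines above appear to be noise from client-go's certificate-rotation watcher still polling client.crt files of profiles deleted earlier in this run (cilium-..., no-preload-..., and so on); the start itself completed in 1m54s and the test passed. One way to confirm those profiles no longer exist, using the same binary the suite builds:

	# Deleted profiles should be absent from this listing.
	out/minikube-linux-amd64 profile list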

TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop (0s)
=== RUN   TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop
start_stop_delete_test.go:246: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/UserAppExistsAfterStop (0.00s)

TestStartStop/group/newest-cni/serial/AddonExistsAfterStop (0s)
=== RUN   TestStartStop/group/newest-cni/serial/AddonExistsAfterStop
start_stop_delete_test.go:257: WARNING: cni mode requires additional setup before pods can schedule :(
--- PASS: TestStartStop/group/newest-cni/serial/AddonExistsAfterStop (0.00s)

TestStartStop/group/newest-cni/serial/VerifyKubernetesImages (0.25s)
=== RUN   TestStartStop/group/newest-cni/serial/VerifyKubernetesImages
start_stop_delete_test.go:277: (dbg) Run:  out/minikube-linux-amd64 ssh -p newest-cni-20210813004154-679351 "sudo crictl images -o json"
start_stop_delete_test.go:277: Found non-minikube image: kindest/kindnetd:v20210326-1e038dc5
--- PASS: TestStartStop/group/newest-cni/serial/VerifyKubernetesImages (0.25s)
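Note: because the runtime under test is containerd, the image check goes through the CRI client (crictl) over SSH rather than the docker CLI. A hedged sketch of inspecting the same data by hand, assuming jq is installed and a profile named my-profile:

	# crictl -o json emits an "images" array; each entry lists its repoTags.
	out/minikube-linux-amd64 ssh -p my-profile "sudo crictl images -o json" \
		| jq -r '.images[].repoTags[]'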

TestStartStop/group/newest-cni/serial/Pause (2.18s)
=== RUN   TestStartStop/group/newest-cni/serial/Pause
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 pause -p newest-cni-20210813004154-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351: exit status 2 (247.853236ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Kubelet}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351: exit status 2 (240.256865ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 unpause -p newest-cni-20210813004154-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p newest-cni-20210813004154-679351 -n newest-cni-20210813004154-679351
--- PASS: TestStartStop/group/newest-cni/serial/Pause (2.18s)
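Note: the round trip above is the expected pause state machine: after minikube pause the apiserver reports Paused and the kubelet reports Stopped (each via exit status 2, which the test tolerates), and minikube unpause returns both to a running state; the default-k8s-different-port profile below repeats the same sequence. A minimal sketch, assuming a profile named my-profile:

	out/minikube-linux-amd64 pause -p my-profile
	out/minikube-linux-amd64 status --format='{{.APIServer}}' -p my-profile   # Paused, exit 2
	out/minikube-linux-amd64 status --format='{{.Kubelet}}' -p my-profile     # Stopped, exit 2
	out/minikube-linux-amd64 unpause -p my-profile
	out/minikube-linux-amd64 status --format='{{.APIServer}}' -p my-profile   # Running, exit 0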

TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop (5.04s)
=== RUN   TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop
start_stop_delete_test.go:247: (dbg) TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-c2cn9" [7743297d-ef21-41dd-b227-8617f928dcc3] Running
start_stop_delete_test.go:247: (dbg) TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.039384691s
--- PASS: TestStartStop/group/default-k8s-different-port/serial/UserAppExistsAfterStop (5.04s)

TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop (5.1s)
=== RUN   TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop
start_stop_delete_test.go:260: (dbg) TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop: waiting 9m0s for pods matching "k8s-app=kubernetes-dashboard" in namespace "kubernetes-dashboard" ...
helpers_test.go:343: "kubernetes-dashboard-6fcdf4f6d-c2cn9" [7743297d-ef21-41dd-b227-8617f928dcc3] Running
start_stop_delete_test.go:260: (dbg) TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop: k8s-app=kubernetes-dashboard healthy within 5.008682647s
start_stop_delete_test.go:264: (dbg) Run:  kubectl --context default-k8s-different-port-20210813003533-679351 describe deploy/dashboard-metrics-scraper -n kubernetes-dashboard
--- PASS: TestStartStop/group/default-k8s-different-port/serial/AddonExistsAfterStop (5.10s)

TestStartStop/group/default-k8s-different-port/serial/VerifyKubernetesImages (0.26s)
=== RUN   TestStartStop/group/default-k8s-different-port/serial/VerifyKubernetesImages
start_stop_delete_test.go:277: (dbg) Run:  out/minikube-linux-amd64 ssh -p default-k8s-different-port-20210813003533-679351 "sudo crictl images -o json"
start_stop_delete_test.go:277: Found non-minikube image: kindest/kindnetd:v20210326-1e038dc5
start_stop_delete_test.go:277: Found non-minikube image: library/busybox:1.28.4-glibc
--- PASS: TestStartStop/group/default-k8s-different-port/serial/VerifyKubernetesImages (0.26s)

TestStartStop/group/default-k8s-different-port/serial/Pause (2.49s)
=== RUN   TestStartStop/group/default-k8s-different-port/serial/Pause
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 pause -p default-k8s-different-port-20210813003533-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.APIServer}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351: exit status 2 (239.617909ms)

-- stdout --
	Paused

-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
start_stop_delete_test.go:284: (dbg) Non-zero exit: out/minikube-linux-amd64 status --format={{.Kubelet}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351: exit status 2 (239.215185ms)

-- stdout --
	Stopped

-- /stdout --
start_stop_delete_test.go:284: status error: exit status 2 (may be ok)
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 unpause -p default-k8s-different-port-20210813003533-679351 --alsologtostderr -v=1
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.APIServer}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
start_stop_delete_test.go:284: (dbg) Run:  out/minikube-linux-amd64 status --format={{.Kubelet}} -p default-k8s-different-port-20210813003533-679351 -n default-k8s-different-port-20210813003533-679351
--- PASS: TestStartStop/group/default-k8s-different-port/serial/Pause (2.49s)


Test skip (28/263)

TestDownloadOnly/v1.14.0/cached-images (0s)

=== RUN   TestDownloadOnly/v1.14.0/cached-images
aaa_download_only_test.go:119: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.14.0/cached-images (0.00s)

TestDownloadOnly/v1.14.0/binaries (0s)

=== RUN   TestDownloadOnly/v1.14.0/binaries
aaa_download_only_test.go:138: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.14.0/binaries (0.00s)

TestDownloadOnly/v1.14.0/kubectl (0s)

=== RUN   TestDownloadOnly/v1.14.0/kubectl
aaa_download_only_test.go:154: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.14.0/kubectl (0.00s)

TestDownloadOnly/v1.21.3/cached-images (0s)

=== RUN   TestDownloadOnly/v1.21.3/cached-images
aaa_download_only_test.go:119: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.21.3/cached-images (0.00s)

TestDownloadOnly/v1.21.3/binaries (0s)

=== RUN   TestDownloadOnly/v1.21.3/binaries
aaa_download_only_test.go:138: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.21.3/binaries (0.00s)

TestDownloadOnly/v1.21.3/kubectl (0s)

=== RUN   TestDownloadOnly/v1.21.3/kubectl
aaa_download_only_test.go:154: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.21.3/kubectl (0.00s)

TestDownloadOnly/v1.22.0-rc.0/cached-images (0s)

=== RUN   TestDownloadOnly/v1.22.0-rc.0/cached-images
aaa_download_only_test.go:119: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.22.0-rc.0/cached-images (0.00s)

TestDownloadOnly/v1.22.0-rc.0/binaries (0s)

=== RUN   TestDownloadOnly/v1.22.0-rc.0/binaries
aaa_download_only_test.go:138: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.22.0-rc.0/binaries (0.00s)

TestDownloadOnly/v1.22.0-rc.0/kubectl (0s)

=== RUN   TestDownloadOnly/v1.22.0-rc.0/kubectl
aaa_download_only_test.go:154: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.22.0-rc.0/kubectl (0.00s)

TestDownloadOnlyKic (0s)

=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:212: skipping, only for docker or podman driver
--- SKIP: TestDownloadOnlyKic (0.00s)

TestDockerFlags (0s)

=== RUN   TestDockerFlags
docker_test.go:35: skipping: only runs with docker container runtime, currently testing containerd
--- SKIP: TestDockerFlags (0.00s)

TestHyperKitDriverInstallOrUpdate (0s)

=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:115: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

TestHyperkitDriverSkipUpgrade (0s)

=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:188: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

TestFunctional/parallel/DockerEnv (0s)

=== RUN   TestFunctional/parallel/DockerEnv
=== PAUSE TestFunctional/parallel/DockerEnv
=== CONT  TestFunctional/parallel/DockerEnv
functional_test.go:467: only validate docker env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/DockerEnv (0.00s)

TestFunctional/parallel/PodmanEnv (0s)

=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv
=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:527: only validate podman env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.01s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:96: DNS forwarding is supported for darwin only now, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.01s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.01s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:96: DNS forwarding is supported for darwin only now, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.01s)

TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:96: DNS forwarding is supported for darwin only now, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

TestGvisorAddon (0s)

=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

TestKicCustomNetwork (0s)

=== RUN   TestKicCustomNetwork
kic_custom_network_test.go:34: only runs with docker driver
--- SKIP: TestKicCustomNetwork (0.00s)

TestKicExistingNetwork (0s)

=== RUN   TestKicExistingNetwork
kic_custom_network_test.go:73: only runs with docker driver
--- SKIP: TestKicExistingNetwork (0.00s)

TestChangeNoneUser (0s)

=== RUN   TestChangeNoneUser
none_test.go:39: Only test none driver.
--- SKIP: TestChangeNoneUser (0.00s)

TestScheduledStopWindows (0s)

=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:43: test only runs on windows
--- SKIP: TestScheduledStopWindows (0.00s)

TestSkaffold (0s)

=== RUN   TestSkaffold
skaffold_test.go:43: skaffold requires docker-env, currently testing containerd container runtime
--- SKIP: TestSkaffold (0.00s)

TestInsufficientStorage (0s)

=== RUN   TestInsufficientStorage
status_test.go:38: only runs with docker driver
--- SKIP: TestInsufficientStorage (0.00s)

TestMissingContainerUpgrade (0s)

=== RUN   TestMissingContainerUpgrade
version_upgrade_test.go:286: This test is only for Docker
--- SKIP: TestMissingContainerUpgrade (0.00s)

TestNetworkPlugins/group/kubenet (0.25s)

=== RUN   TestNetworkPlugins/group/kubenet
net_test.go:88: Skipping the test as containerd container runtimes requires CNI
helpers_test.go:176: Cleaning up "kubenet-20210813002105-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p kubenet-20210813002105-679351
--- SKIP: TestNetworkPlugins/group/kubenet (0.25s)

TestStartStop/group/disable-driver-mounts (0.32s)

=== RUN   TestStartStop/group/disable-driver-mounts
=== PAUSE TestStartStop/group/disable-driver-mounts
=== CONT  TestStartStop/group/disable-driver-mounts
start_stop_delete_test.go:91: skipping TestStartStop/group/disable-driver-mounts - only runs on virtualbox
helpers_test.go:176: Cleaning up "disable-driver-mounts-20210813003127-679351" profile ...
helpers_test.go:179: (dbg) Run:  out/minikube-linux-amd64 delete -p disable-driver-mounts-20210813003127-679351
--- SKIP: TestStartStop/group/disable-driver-mounts (0.32s)
