==> Audit <==
|--------------|--------------------------------|----------|------------|---------|----------------------|----------------------|
| Command | Args | Profile | User | Version | Start Time | End Time |
|--------------|--------------------------------|----------|------------|---------|----------------------|----------------------|
| update-check | | minikube | ofidohubvm | v1.34.0 | 24 Oct 24 15:54 SAST | 24 Oct 24 15:54 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 24 Oct 24 23:58 SAST | 24 Oct 24 23:58 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 00:26 SAST | 25 Oct 24 00:49 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 00:56 SAST | |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 00:59 SAST | 25 Oct 24 01:00 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 08:55 SAST | 25 Oct 24 08:55 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 08:58 SAST | |
| start | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 08:59 SAST | 25 Oct 24 09:01 SAST |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 09:01 SAST | 25 Oct 24 09:02 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 09:02 SAST | |
| start | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 14:01 SAST | 25 Oct 24 14:03 SAST |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 14:14 SAST | 25 Oct 24 14:14 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 14:57 SAST | 25 Oct 24 14:57 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 15:05 SAST | |
| image | load | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 15:40 SAST | |
| | ofidohubvm/atoinium:latest | | | | | |
| image | load | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 16:09 SAST | |
| | ofidohubvm/atoinium:latest | | | | | |
| stop | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 16:10 SAST | 25 Oct 24 16:11 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 25 Oct 24 16:15 SAST | |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 08:56 SAST | 26 Oct 24 08:56 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 08:57 SAST | 26 Oct 24 09:00 SAST |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 09:00 SAST | 26 Oct 24 09:01 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 09:43 SAST | 26 Oct 24 09:43 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 09:44 SAST | 26 Oct 24 09:49 SAST |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 09:50 SAST | 26 Oct 24 09:50 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 09:50 SAST | |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 10:20 SAST | 26 Oct 24 10:28 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 10:29 SAST | |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 11:05 SAST | 26 Oct 24 11:05 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 12:27 SAST | 26 Oct 24 12:27 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 12:56 SAST | 26 Oct 24 12:56 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 13:20 SAST | 26 Oct 24 13:20 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 15:55 SAST | 26 Oct 24 15:55 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 16:16 SAST | 26 Oct 24 16:16 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 16:25 SAST | 26 Oct 24 16:25 SAST |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 17:10 SAST | 26 Oct 24 17:10 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 17:48 SAST | |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 18:08 SAST | |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 18:10 SAST | 26 Oct 24 18:10 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 18:15 SAST | 26 Oct 24 18:20 SAST |
| addons | enable metrics-server | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 18:30 SAST | 26 Oct 24 18:31 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:17 SAST | |
| update-check | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:23 SAST | 26 Oct 24 19:23 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:23 SAST | |
| config | set driver docker | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:24 SAST | 26 Oct 24 19:24 SAST |
| delete | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:25 SAST | 26 Oct 24 19:25 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 19:25 SAST | 26 Oct 24 19:28 SAST |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 20:21 SAST | |
| dashboard | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 20:46 SAST | |
| stop | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 21:39 SAST | 26 Oct 24 21:42 SAST |
| start | | minikube | ofidohubvm | v1.34.0 | 26 Oct 24 21:42 SAST | |
|--------------|--------------------------------|----------|------------|---------|----------------------|----------------------|
==> Last Start <==
Log file created at: 2024/10/26 21:42:36
Running on machine: Nobuhles-MacBook-Pro
Binary: Built with gc go1.23.1 for darwin/amd64
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I1026 21:42:36.835491 41789 out.go:345] Setting OutFile to fd 1 ...
I1026 21:42:36.886477 41789 out.go:397] isatty.IsTerminal(1) = true
I1026 21:42:36.886497 41789 out.go:358] Setting ErrFile to fd 2...
I1026 21:42:36.886521 41789 out.go:397] isatty.IsTerminal(2) = true
I1026 21:42:36.912427 41789 root.go:338] Updating PATH: /Users/ofidohubvm/.minikube/bin
I1026 21:42:37.304023 41789 out.go:352] Setting JSON to false
I1026 21:42:37.496603 41789 start.go:129] hostinfo: {"hostname":"Nobuhles-MacBook-Pro.local","uptime":8913,"bootTime":1729962844,"procs":332,"os":"darwin","platform":"darwin","platformFamily":"Standalone Workstation","platformVersion":"13.7.1","kernelVersion":"22.6.0","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"73cb8189-d8e0-52bc-a4f2-e222f599b850"}
W1026 21:42:37.522805 41789 start.go:137] gopshost.Virtualization returned error: not implemented yet
I1026 21:42:37.612622 41789 out.go:177] 😄 minikube v1.34.0 on Darwin 13.7.1
I1026 21:42:37.713984 41789 config.go:182] Loaded profile config "minikube": Driver=docker, ContainerRuntime=docker, KubernetesVersion=v1.31.0
I1026 21:42:37.829160 41789 notify.go:220] Checking for updates...
I1026 21:42:37.876363 41789 driver.go:394] Setting default libvirt URI to qemu:///system
I1026 21:42:39.368140 41789 docker.go:123] docker version: linux-27.2.0:Docker Desktop 4.34.3 (170107)
I1026 21:42:39.377457 41789 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I1026 21:42:47.159818 41789 cli_runner.go:217] Completed: docker system info --format "{{json .}}": (7.781578876s)
I1026 21:42:47.368917 41789 info.go:266] docker info: {ID:15fff506-3cd3-42bd-bc86-839ea9a862e3 Containers:44 ContainersRunning:29 ContainersPaused:0 ContainersStopped:15 Images:19 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:false CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:361 OomKillDisable:false NGoroutines:413 SystemTime:2024-10-26 19:42:46.743441055 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:16 KernelVersion:6.10.4-linuxkit OperatingSystem:Docker Desktop OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:4 MemTotal:4104826880 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy:http.docker.internal:3128 HTTPSProxy:http.docker.internal:3128 NoProxy:hubproxy.docker.internal Name:docker-desktop Labels:[com.docker.desktop.address=unix:///Users/ofidohubvm/Library/Containers/com.docker.docker/Data/docker-cli.sock] ExperimentalBuild:false ServerVersion:27.2.0 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:8fc6bcff51318944179630522a095cc9dbf9f353 Expected:8fc6bcff51318944179630522a095cc9dbf9f353} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=seccomp,profile=unconfined name=cgroupns] ProductLicense: Warnings:[WARNING: daemon is not using the default seccomp profile] ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/Users/ofidohubvm/.docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2-desktop.1] map[Name:compose Path:/Users/ofidohubvm/.docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShadowedPaths:[/usr/local/lib/docker/cli-plugins/docker-compose] ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2-desktop.2] map[Name:debug Path:/Users/ofidohubvm/.docker/cli-plugins/docker-debug SchemaVersion:0.1.0 ShortDescription:Get a shell into any image or container Vendor:Docker Inc. Version:0.0.34] map[Name:desktop Path:/Users/ofidohubvm/.docker/cli-plugins/docker-desktop SchemaVersion:0.1.0 ShortDescription:Docker Desktop commands (Alpha) Vendor:Docker Inc. Version:v0.0.15] map[Name:dev Path:/Users/ofidohubvm/.docker/cli-plugins/docker-dev SchemaVersion:0.1.0 ShortDescription:Docker Dev Environments Vendor:Docker Inc. Version:v0.1.2] map[Name:extension Path:/Users/ofidohubvm/.docker/cli-plugins/docker-extension SchemaVersion:0.1.0 ShortDescription:Manages Docker extensions Vendor:Docker Inc. 
Version:v0.2.25] map[Name:feedback Path:/Users/ofidohubvm/.docker/cli-plugins/docker-feedback SchemaVersion:0.1.0 ShortDescription:Provide feedback, right in your terminal! Vendor:Docker Inc. Version:v1.0.5] map[Name:init Path:/Users/ofidohubvm/.docker/cli-plugins/docker-init SchemaVersion:0.1.0 ShortDescription:Creates Docker-related starter files for your project Vendor:Docker Inc. Version:v1.3.0] map[Name:sbom Path:/Users/ofidohubvm/.docker/cli-plugins/docker-sbom SchemaVersion:0.1.0 ShortDescription:View the packaged-based Software Bill Of Materials (SBOM) for an image URL:https://github.com/docker/sbom-cli-plugin Vendor:Anchore Inc. Version:0.6.0] map[Name:scout Path:/Users/ofidohubvm/.docker/cli-plugins/docker-scout SchemaVersion:0.1.0 ShortDescription:Docker Scout Vendor:Docker Inc. Version:v1.13.0]] Warnings:<nil>}}
I1026 21:42:47.454586 41789 out.go:177] ✨ Using the docker driver based on existing profile
I1026 21:42:47.524675 41789 start.go:297] selected driver: docker
I1026 21:42:47.525456 41789 start.go:901] validating driver "docker" against &{Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.0 ClusterName:minikube Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.0 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/Users:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1026 21:42:47.525980 41789 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
I1026 21:42:47.527247 41789 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I1026 21:42:48.349752 41789 info.go:266] docker info: {ID:15fff506-3cd3-42bd-bc86-839ea9a862e3 Containers:44 ContainersRunning:29 ContainersPaused:0 ContainersStopped:15 Images:19 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:false CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:381 OomKillDisable:false NGoroutines:454 SystemTime:2024-10-26 19:42:48.1453141 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:17 KernelVersion:6.10.4-linuxkit OperatingSystem:Docker Desktop OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:4 MemTotal:4104826880 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy:http.docker.internal:3128 HTTPSProxy:http.docker.internal:3128 NoProxy:hubproxy.docker.internal Name:docker-desktop Labels:[com.docker.desktop.address=unix:///Users/ofidohubvm/Library/Containers/com.docker.docker/Data/docker-cli.sock] ExperimentalBuild:false ServerVersion:27.2.0 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:8fc6bcff51318944179630522a095cc9dbf9f353 Expected:8fc6bcff51318944179630522a095cc9dbf9f353} RuncCommit:{ID:v1.1.13-0-g58aa920 Expected:v1.1.13-0-g58aa920} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=seccomp,profile=unconfined name=cgroupns] ProductLicense: Warnings:[WARNING: daemon is not using the default seccomp profile] ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/Users/ofidohubvm/.docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2-desktop.1] map[Name:compose Path:/Users/ofidohubvm/.docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShadowedPaths:[/usr/local/lib/docker/cli-plugins/docker-compose] ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2-desktop.2] map[Name:debug Path:/Users/ofidohubvm/.docker/cli-plugins/docker-debug SchemaVersion:0.1.0 ShortDescription:Get a shell into any image or container Vendor:Docker Inc. Version:0.0.34] map[Name:desktop Path:/Users/ofidohubvm/.docker/cli-plugins/docker-desktop SchemaVersion:0.1.0 ShortDescription:Docker Desktop commands (Alpha) Vendor:Docker Inc. Version:v0.0.15] map[Name:dev Path:/Users/ofidohubvm/.docker/cli-plugins/docker-dev SchemaVersion:0.1.0 ShortDescription:Docker Dev Environments Vendor:Docker Inc. Version:v0.1.2] map[Name:extension Path:/Users/ofidohubvm/.docker/cli-plugins/docker-extension SchemaVersion:0.1.0 ShortDescription:Manages Docker extensions Vendor:Docker Inc. 
Version:v0.2.25] map[Name:feedback Path:/Users/ofidohubvm/.docker/cli-plugins/docker-feedback SchemaVersion:0.1.0 ShortDescription:Provide feedback, right in your terminal! Vendor:Docker Inc. Version:v1.0.5] map[Name:init Path:/Users/ofidohubvm/.docker/cli-plugins/docker-init SchemaVersion:0.1.0 ShortDescription:Creates Docker-related starter files for your project Vendor:Docker Inc. Version:v1.3.0] map[Name:sbom Path:/Users/ofidohubvm/.docker/cli-plugins/docker-sbom SchemaVersion:0.1.0 ShortDescription:View the packaged-based Software Bill Of Materials (SBOM) for an image URL:https://github.com/docker/sbom-cli-plugin Vendor:Anchore Inc. Version:0.6.0] map[Name:scout Path:/Users/ofidohubvm/.docker/cli-plugins/docker-scout SchemaVersion:0.1.0 ShortDescription:Docker Scout Vendor:Docker Inc. Version:v1.13.0]] Warnings:<nil>}}
I1026 21:42:48.545035 41789 cni.go:84] Creating CNI manager for ""
I1026 21:42:48.696185 41789 cni.go:158] "docker" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I1026 21:42:48.701000 41789 start.go:340] cluster config:
{Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.0 ClusterName:minikube Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.0 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/Users:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1026 21:42:48.792986 41789 out.go:177] 👍 Starting "minikube" primary control-plane node in "minikube" cluster
I1026 21:42:48.857726 41789 cache.go:121] Beginning downloading kic base image for docker with docker
I1026 21:42:48.907208 41789 out.go:177] 🚜 Pulling base image v0.0.45 ...
I1026 21:42:49.031804 41789 image.go:79] Checking for gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 in local docker daemon
I1026 21:42:49.031802 41789 preload.go:131] Checking if preload exists for k8s version v1.31.0 and runtime docker
I1026 21:42:49.032057 41789 preload.go:146] Found local preload: /Users/ofidohubvm/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.0-docker-overlay2-amd64.tar.lz4
I1026 21:42:49.032395 41789 cache.go:56] Caching tarball of preloaded images
I1026 21:42:49.035118 41789 preload.go:172] Found /Users/ofidohubvm/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.0-docker-overlay2-amd64.tar.lz4 in cache, skipping download
I1026 21:42:49.035255 41789 cache.go:59] Finished verifying existence of preloaded tar for v1.31.0 on docker
I1026 21:42:49.036837 41789 profile.go:143] Saving config to /Users/ofidohubvm/.minikube/profiles/minikube/config.json ...
W1026 21:42:49.879794 41789 image.go:95] image gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 is of wrong architecture
I1026 21:42:49.879921 41789 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 to local cache
I1026 21:42:49.880730 41789 image.go:63] Checking for gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 in local cache directory
I1026 21:42:49.881442 41789 image.go:66] Found gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 in local cache directory, skipping pull
I1026 21:42:49.897896 41789 image.go:135] gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 exists in cache, skipping pull
I1026 21:42:49.897937 41789 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 as a tarball
I1026 21:42:49.897969 41789 cache.go:162] Loading gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 from local cache
I1026 21:43:00.700971 41789 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 from cached tarball
I1026 21:43:00.702592 41789 cache.go:194] Successfully downloaded all kic artifacts
I1026 21:43:00.808567 41789 start.go:360] acquireMachinesLock for minikube: {Name:mk030935974f6ca87cc682729ba28d2c701a8310 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I1026 21:43:00.812040 41789 start.go:364] duration metric: took 1.835184ms to acquireMachinesLock for "minikube"
I1026 21:43:00.812992 41789 start.go:96] Skipping create...Using existing machine configuration
I1026 21:43:00.838411 41789 fix.go:54] fixHost starting:
I1026 21:43:00.842198 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:43:01.421277 41789 fix.go:112] recreateIfNeeded on minikube: state=Stopped err=<nil>
W1026 21:43:01.421441 41789 fix.go:138] unexpected machine state, will restart: <nil>
I1026 21:43:01.546525 41789 out.go:177] 🔄 Restarting existing docker container for "minikube" ...
I1026 21:43:01.627875 41789 cli_runner.go:164] Run: docker start minikube
I1026 21:43:26.337667 41789 cli_runner.go:217] Completed: docker start minikube: (24.707723917s)
I1026 21:43:26.340413 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:43:27.149087 41789 kic.go:430] container "minikube" state is running.
I1026 21:43:27.215852 41789 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" minikube
I1026 21:43:27.631485 41789 profile.go:143] Saving config to /Users/ofidohubvm/.minikube/profiles/minikube/config.json ...
I1026 21:43:27.633930 41789 machine.go:93] provisionDockerMachine start ...
I1026 21:43:27.728048 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:27.981358 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:28.414399 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:28.414427 41789 main.go:141] libmachine: About to run SSH command:
hostname
I1026 21:43:28.709419 41789 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
I1026 21:43:31.731478 41789 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
I1026 21:43:34.735104 41789 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
I1026 21:43:37.764711 41789 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
I1026 21:43:43.459806 41789 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
I1026 21:43:43.464258 41789 ubuntu.go:169] provisioning hostname "minikube"
I1026 21:43:43.472300 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:43.555143 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:43.559347 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:43.559362 41789 main.go:141] libmachine: About to run SSH command:
sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
I1026 21:43:44.099134 41789 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
I1026 21:43:44.122301 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:44.188311 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:44.188982 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:44.189004 41789 main.go:141] libmachine: About to run SSH command:
if ! grep -xq '.*\sminikube' /etc/hosts; then
if grep -xq '127.0.1.1\s.*' /etc/hosts; then
sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 minikube/g' /etc/hosts;
else
echo '127.0.1.1 minikube' | sudo tee -a /etc/hosts;
fi
fi
I1026 21:43:44.572780 41789 main.go:141] libmachine: SSH cmd err, output: <nil>:
I1026 21:43:44.573332 41789 ubuntu.go:175] set auth options {CertDir:/Users/ofidohubvm/.minikube CaCertPath:/Users/ofidohubvm/.minikube/certs/ca.pem CaPrivateKeyPath:/Users/ofidohubvm/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/Users/ofidohubvm/.minikube/machines/server.pem ServerKeyPath:/Users/ofidohubvm/.minikube/machines/server-key.pem ClientKeyPath:/Users/ofidohubvm/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/Users/ofidohubvm/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/Users/ofidohubvm/.minikube}
I1026 21:43:44.573403 41789 ubuntu.go:177] setting up certificates
I1026 21:43:44.573522 41789 provision.go:84] configureAuth start
I1026 21:43:44.573926 41789 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" minikube
I1026 21:43:44.716632 41789 provision.go:143] copyHostCerts
I1026 21:43:44.726143 41789 exec_runner.go:144] found /Users/ofidohubvm/.minikube/key.pem, removing ...
I1026 21:43:44.768598 41789 exec_runner.go:203] rm: /Users/ofidohubvm/.minikube/key.pem
I1026 21:43:44.770980 41789 exec_runner.go:151] cp: /Users/ofidohubvm/.minikube/certs/key.pem --> /Users/ofidohubvm/.minikube/key.pem (1679 bytes)
I1026 21:43:44.837626 41789 exec_runner.go:144] found /Users/ofidohubvm/.minikube/ca.pem, removing ...
I1026 21:43:44.837641 41789 exec_runner.go:203] rm: /Users/ofidohubvm/.minikube/ca.pem
I1026 21:43:44.837853 41789 exec_runner.go:151] cp: /Users/ofidohubvm/.minikube/certs/ca.pem --> /Users/ofidohubvm/.minikube/ca.pem (1090 bytes)
I1026 21:43:44.839030 41789 exec_runner.go:144] found /Users/ofidohubvm/.minikube/cert.pem, removing ...
I1026 21:43:44.839040 41789 exec_runner.go:203] rm: /Users/ofidohubvm/.minikube/cert.pem
I1026 21:43:44.839250 41789 exec_runner.go:151] cp: /Users/ofidohubvm/.minikube/certs/cert.pem --> /Users/ofidohubvm/.minikube/cert.pem (1131 bytes)
I1026 21:43:44.841620 41789 provision.go:117] generating server cert: /Users/ofidohubvm/.minikube/machines/server.pem ca-key=/Users/ofidohubvm/.minikube/certs/ca.pem private-key=/Users/ofidohubvm/.minikube/certs/ca-key.pem org=ofidohubvm.minikube san=[127.0.0.1 192.168.49.2 localhost minikube]
I1026 21:43:46.419104 41789 provision.go:177] copyRemoteCerts
I1026 21:43:46.449211 41789 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
I1026 21:43:46.449734 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:46.622401 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:43:47.328579 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/machines/server.pem --> /etc/docker/server.pem (1192 bytes)
I1026 21:43:48.300298 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
I1026 21:43:49.486248 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1090 bytes)
I1026 21:43:50.035894 41789 provision.go:87] duration metric: took 5.461874111s to configureAuth
I1026 21:43:50.036023 41789 ubuntu.go:193] setting minikube options for container-runtime
I1026 21:43:50.037739 41789 config.go:182] Loaded profile config "minikube": Driver=docker, ContainerRuntime=docker, KubernetesVersion=v1.31.0
I1026 21:43:50.038178 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:50.307657 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:50.309089 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:50.309104 41789 main.go:141] libmachine: About to run SSH command:
df --output=fstype / | tail -n 1
I1026 21:43:51.734591 41789 main.go:141] libmachine: SSH cmd err, output: <nil>: overlay
I1026 21:43:51.734624 41789 ubuntu.go:71] root file system type: overlay
I1026 21:43:51.749566 41789 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
I1026 21:43:51.751684 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:51.995940 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:51.996431 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:51.997416 41789 main.go:141] libmachine: About to run SSH command:
sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
BindsTo=containerd.service
After=network-online.target firewalld.service containerd.service
Wants=network-online.target
Requires=docker.socket
StartLimitBurst=3
StartLimitIntervalSec=60
[Service]
Type=notify
Restart=on-failure
# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
# The base configuration already specifies an 'ExecStart=...' command. The first directive
# here is to clear out that command inherited from the base configuration. Without this,
# the command from the base configuration and the command specified here are treated as
# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
# will catch this invalid input and refuse to start the service with an error like:
# Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
ExecStart=
ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=docker --insecure-registry 10.96.0.0/12
ExecReload=/bin/kill -s HUP \$MAINPID
# Having non-zero Limit*s causes performance problems due to accounting overhead
# in the kernel. We recommend using cgroups to do container-local accounting.
LimitNOFILE=infinity
LimitNPROC=infinity
LimitCORE=infinity
# Uncomment TasksMax if your systemd version supports it.
# Only systemd 226 and above support this version.
TasksMax=infinity
TimeoutStartSec=0
# set delegate yes so that systemd does not reset the cgroups of docker containers
Delegate=yes
# kill only the docker process, not all processes in the cgroup
KillMode=process
[Install]
WantedBy=multi-user.target
" | sudo tee /lib/systemd/system/docker.service.new
I1026 21:43:54.799998 41789 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
BindsTo=containerd.service
After=network-online.target firewalld.service containerd.service
Wants=network-online.target
Requires=docker.socket
StartLimitBurst=3
StartLimitIntervalSec=60
[Service]
Type=notify
Restart=on-failure
# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
# The base configuration already specifies an 'ExecStart=...' command. The first directive
# here is to clear out that command inherited from the base configuration. Without this,
# the command from the base configuration and the command specified here are treated as
# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
# will catch this invalid input and refuse to start the service with an error like:
# Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
ExecStart=
ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=docker --insecure-registry 10.96.0.0/12
ExecReload=/bin/kill -s HUP $MAINPID
# Having non-zero Limit*s causes performance problems due to accounting overhead
# in the kernel. We recommend using cgroups to do container-local accounting.
LimitNOFILE=infinity
LimitNPROC=infinity
LimitCORE=infinity
# Uncomment TasksMax if your systemd version supports it.
# Only systemd 226 and above support this version.
TasksMax=infinity
TimeoutStartSec=0
# set delegate yes so that systemd does not reset the cgroups of docker containers
Delegate=yes
# kill only the docker process, not all processes in the cgroup
KillMode=process
[Install]
WantedBy=multi-user.target
I1026 21:43:54.816816 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:55.191115 41789 main.go:141] libmachine: Using SSH client type: native
I1026 21:43:55.192537 41789 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x105322dc0] 0x105325aa0 <nil> [] 0s} 127.0.0.1 53491 <nil> <nil>}
I1026 21:43:55.192562 41789 main.go:141] libmachine: About to run SSH command:
sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
I1026 21:43:57.649006 41789 main.go:141] libmachine: SSH cmd err, output: <nil>:
I1026 21:43:57.666837 41789 machine.go:96] duration metric: took 30.015192658s to provisionDockerMachine
I1026 21:43:57.682018 41789 start.go:293] postStartSetup for "minikube" (driver="docker")
I1026 21:43:57.685397 41789 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
I1026 21:43:57.696736 41789 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
I1026 21:43:57.697451 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:43:58.162290 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:43:59.830218 41789 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (2.133267757s)
I1026 21:43:59.830847 41789 ssh_runner.go:195] Run: cat /etc/os-release
I1026 21:43:59.902257 41789 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
I1026 21:43:59.902368 41789 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
I1026 21:43:59.902414 41789 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
I1026 21:43:59.902426 41789 info.go:137] Remote host: Ubuntu 22.04.4 LTS
I1026 21:43:59.902600 41789 filesync.go:126] Scanning /Users/ofidohubvm/.minikube/addons for local assets ...
I1026 21:43:59.905211 41789 filesync.go:126] Scanning /Users/ofidohubvm/.minikube/files for local assets ...
I1026 21:43:59.906117 41789 start.go:296] duration metric: took 2.22404883s for postStartSetup
I1026 21:43:59.913328 41789 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
I1026 21:43:59.913614 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:44:00.380506 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:44:05.266645 41789 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (5.353193507s)
I1026 21:44:05.267378 41789 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
I1026 21:44:05.548203 41789 fix.go:56] duration metric: took 1m4.709757848s for fixHost
I1026 21:44:05.548273 41789 start.go:83] releasing machines lock for "minikube", held for 1m4.73612929s
I1026 21:44:05.549342 41789 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" minikube
I1026 21:44:05.792006 41789 ssh_runner.go:195] Run: cat /version.json
I1026 21:44:05.792393 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:44:05.840467 41789 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
I1026 21:44:05.933562 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:44:06.076735 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:44:06.262597 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:44:14.352297 41789 ssh_runner.go:235] Completed: curl -sS -m 2 https://registry.k8s.io/: (8.511680592s)
W1026 21:44:14.361364 41789 start.go:867] [curl -sS -m 2 https://registry.k8s.io/] failed: curl -sS -m 2 https://registry.k8s.io/: Process exited with status 28
stdout:
stderr:
curl: (28) Operation timed out after 2014 milliseconds with 0 bytes received
I1026 21:44:14.361614 41789 ssh_runner.go:235] Completed: cat /version.json: (8.569452686s)
I1026 21:44:14.399134 41789 ssh_runner.go:195] Run: systemctl --version
I1026 21:44:14.660253 41789 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
I1026 21:44:14.762013 41789 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
I1026 21:44:15.018143 41789 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
I1026 21:44:15.027030 41789 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
W1026 21:44:15.051851 41789 out.go:270] ❗ Failing to connect to https://registry.k8s.io/ from inside the minikube container
W1026 21:44:15.058459 41789 out.go:270] 💡 To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
I1026 21:44:15.105812 41789 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
I1026 21:44:15.105865 41789 start.go:495] detecting cgroup driver to use...
I1026 21:44:15.105902 41789 detect.go:187] detected "cgroupfs" cgroup driver on host os
I1026 21:44:15.445304 41789 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
" | sudo tee /etc/crictl.yaml"
I1026 21:44:15.985824 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
I1026 21:44:16.079198 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
I1026 21:44:16.343325 41789 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
I1026 21:44:16.343817 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
I1026 21:44:16.549538 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1026 21:44:16.601738 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
I1026 21:44:16.690741 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
I1026 21:44:16.757198 41789 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
I1026 21:44:16.913428 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
I1026 21:44:16.991809 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
I1026 21:44:17.174041 41789 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1 enable_unprivileged_ports = true|' /etc/containerd/config.toml"
I1026 21:44:17.273927 41789 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
I1026 21:44:17.368834 41789 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
I1026 21:44:17.397785 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:44:17.984090 41789 ssh_runner.go:195] Run: sudo systemctl restart containerd
I1026 21:44:18.885869 41789 start.go:495] detecting cgroup driver to use...
I1026 21:44:18.887188 41789 detect.go:187] detected "cgroupfs" cgroup driver on host os
I1026 21:44:18.905714 41789 ssh_runner.go:195] Run: sudo systemctl cat docker.service
I1026 21:44:19.058183 41789 cruntime.go:279] skipping containerd shutdown because we are bound to it
I1026 21:44:19.063114 41789 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
I1026 21:44:19.142707 41789 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
" | sudo tee /etc/crictl.yaml"
I1026 21:44:19.245852 41789 ssh_runner.go:195] Run: which cri-dockerd
I1026 21:44:19.262519 41789 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
I1026 21:44:19.352369 41789 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
I1026 21:44:19.573080 41789 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
I1026 21:44:20.360234 41789 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
I1026 21:44:21.359033 41789 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
I1026 21:44:21.538885 41789 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
I1026 21:44:21.653711 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:44:22.141009 41789 ssh_runner.go:195] Run: sudo systemctl restart docker
I1026 21:44:45.189389 41789 ssh_runner.go:235] Completed: sudo systemctl restart docker: (23.047255576s)
I1026 21:44:45.191500 41789 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
I1026 21:44:46.063941 41789 ssh_runner.go:195] Run: sudo systemctl stop cri-docker.socket
I1026 21:44:46.336394 41789 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
I1026 21:44:46.677795 41789 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
I1026 21:44:49.700813 41789 ssh_runner.go:235] Completed: sudo systemctl unmask cri-docker.socket: (3.022786611s)
I1026 21:44:49.701553 41789 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
I1026 21:44:50.626673 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:44:51.577321 41789 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
I1026 21:44:51.846942 41789 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
I1026 21:44:51.990342 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:44:52.997606 41789 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.00723725s)
I1026 21:44:52.997952 41789 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
I1026 21:45:16.776227 41789 ssh_runner.go:235] Completed: sudo systemctl restart cri-docker.service: (23.777795939s)
I1026 21:45:16.785513 41789 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
I1026 21:45:16.816377 41789 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
I1026 21:45:17.226738 41789 start.go:563] Will wait 60s for crictl version
I1026 21:45:17.228437 41789 ssh_runner.go:195] Run: which crictl
I1026 21:45:17.317383 41789 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
I1026 21:45:29.431813 41789 ssh_runner.go:235] Completed: sudo /usr/bin/crictl version: (12.114217132s)
I1026 21:45:29.433680 41789 start.go:579] Version: 0.1.0
RuntimeName: docker
RuntimeVersion: 27.2.0
RuntimeApiVersion: v1
I1026 21:45:29.434814 41789 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
I1026 21:45:33.695616 41789 ssh_runner.go:235] Completed: docker version --format {{.Server.Version}}: (4.260717726s)
I1026 21:45:33.704561 41789 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
I1026 21:45:34.615236 41789 out.go:235] 🐳 Preparing Kubernetes v1.31.0 on Docker 27.2.0 ...
I1026 21:45:34.734285 41789 cli_runner.go:164] Run: docker exec -t minikube dig +short host.docker.internal
I1026 21:45:38.120140 41789 cli_runner.go:217] Completed: docker exec -t minikube dig +short host.docker.internal: (3.385660649s)
I1026 21:45:38.122989 41789 network.go:96] got host ip for mount in container by digging dns: 192.168.65.254
I1026 21:45:38.128728 41789 ssh_runner.go:195] Run: grep 192.168.65.254 host.minikube.internal$ /etc/hosts
I1026 21:45:38.150833 41789 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.65.254 host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I1026 21:45:38.682350 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "8443/tcp") 0).HostPort}}'" minikube
I1026 21:45:38.762972 41789 kubeadm.go:883] updating cluster {Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.0 ClusterName:minikube Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.0 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/Users:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
I1026 21:45:38.781706 41789 preload.go:131] Checking if preload exists for k8s version v1.31.0 and runtime docker
I1026 21:45:38.782430 41789 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
I1026 21:45:40.322561 41789 ssh_runner.go:235] Completed: docker images --format {{.Repository}}:{{.Tag}}: (1.540062557s)
I1026 21:45:40.323320 41789 docker.go:685] Got preloaded images: -- stdout --
ofidohubvm/atoinium:latest
grafana/alloy:v1.4.2
registry.k8s.io/kube-scheduler:v1.31.0
registry.k8s.io/kube-apiserver:v1.31.0
registry.k8s.io/kube-controller-manager:v1.31.0
registry.k8s.io/kube-proxy:v1.31.0
registry.k8s.io/etcd:3.5.15-0
registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.13.0
quay.io/prometheus/node-exporter:v1.8.2
quay.io/sustainable_computing_io/kepler:release-0.7.11
ghcr.io/opencost/opencost:<none>
registry.k8s.io/pause:3.10
ghcr.io/jimmidyson/configmap-reload:v0.12.0
registry.k8s.io/coredns/coredns:v1.11.1
kubernetesui/dashboard:<none>
kubernetesui/metrics-scraper:<none>
gcr.io/k8s-minikube/storage-provisioner:v5
-- /stdout --
I1026 21:45:40.324117 41789 docker.go:615] Images already preloaded, skipping extraction
I1026 21:45:40.332396 41789 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
I1026 21:45:42.272082 41789 ssh_runner.go:235] Completed: docker images --format {{.Repository}}:{{.Tag}}: (1.938878399s)
I1026 21:45:42.272173 41789 docker.go:685] Got preloaded images: -- stdout --
ofidohubvm/atoinium:latest
grafana/alloy:v1.4.2
registry.k8s.io/kube-controller-manager:v1.31.0
registry.k8s.io/kube-apiserver:v1.31.0
registry.k8s.io/kube-scheduler:v1.31.0
registry.k8s.io/kube-proxy:v1.31.0
registry.k8s.io/etcd:3.5.15-0
registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.13.0
quay.io/prometheus/node-exporter:v1.8.2
quay.io/sustainable_computing_io/kepler:release-0.7.11
ghcr.io/opencost/opencost:<none>
registry.k8s.io/pause:3.10
ghcr.io/jimmidyson/configmap-reload:v0.12.0
registry.k8s.io/coredns/coredns:v1.11.1
kubernetesui/dashboard:<none>
kubernetesui/metrics-scraper:<none>
gcr.io/k8s-minikube/storage-provisioner:v5
-- /stdout --
I1026 21:45:42.318967 41789 cache_images.go:84] Images are preloaded, skipping loading
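The "Images are preloaded, skipping loading" decision follows from comparing the docker images --format {{.Repository}}:{{.Tag}} output above against the set of images the selected Kubernetes version needs. A rough Go sketch of that check follows; the required-image list is copied from the preloaded-images output above, and the program shape (a standalone main with a hard-coded required slice) is illustrative rather than minikube's actual cache_images.go logic.

package main

import (
	"bufio"
	"fmt"
	"os/exec"
	"strings"
)

// required lists the core images for Kubernetes v1.31.0, as seen in the log output above.
var required = []string{
	"registry.k8s.io/kube-apiserver:v1.31.0",
	"registry.k8s.io/kube-controller-manager:v1.31.0",
	"registry.k8s.io/kube-scheduler:v1.31.0",
	"registry.k8s.io/kube-proxy:v1.31.0",
	"registry.k8s.io/etcd:3.5.15-0",
	"registry.k8s.io/coredns/coredns:v1.11.1",
	"registry.k8s.io/pause:3.10",
	"gcr.io/k8s-minikube/storage-provisioner:v5",
}

func main() {
	// List what the container runtime already has, in the same format as the log.
	out, err := exec.Command("docker", "images", "--format", "{{.Repository}}:{{.Tag}}").Output()
	if err != nil {
		fmt.Println("docker images failed:", err)
		return
	}
	have := map[string]bool{}
	sc := bufio.NewScanner(strings.NewReader(string(out)))
	for sc.Scan() {
		have[strings.TrimSpace(sc.Text())] = true
	}
	for _, img := range required {
		if !have[img] {
			fmt.Println("missing, would extract preload for:", img)
			return
		}
	}
	fmt.Println("all required images present, skipping extraction")
}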
I1026 21:45:42.348554 41789 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.0 docker true true} ...
I1026 21:45:42.537594 41789 kubeadm.go:946] kubelet [Unit]
Wants=docker.socket
[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/v1.31.0/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=minikube --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
[Install]
config:
{KubernetesVersion:v1.31.0 ClusterName:minikube Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
I1026 21:45:42.601777 41789 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
I1026 21:46:05.555031 41789 ssh_runner.go:235] Completed: docker info --format {{.CgroupDriver}}: (22.952968514s)
I1026 21:46:05.731630 41789 cni.go:84] Creating CNI manager for ""
I1026 21:46:05.731881 41789 cni.go:158] "docker" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I1026 21:46:05.789949 41789 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
I1026 21:46:05.824132 41789 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.0 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:minikube NodeName:minikube DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
I1026 21:46:05.925663 41789 kubeadm.go:187] kubeadm config:
apiVersion: kubeadm.k8s.io/v1beta3
kind: InitConfiguration
localAPIEndpoint:
advertiseAddress: 192.168.49.2
bindPort: 8443
bootstrapTokens:
- groups:
- system:bootstrappers:kubeadm:default-node-token
ttl: 24h0m0s
usages:
- signing
- authentication
nodeRegistration:
criSocket: unix:///var/run/cri-dockerd.sock
name: "minikube"
kubeletExtraArgs:
node-ip: 192.168.49.2
taints: []
---
apiVersion: kubeadm.k8s.io/v1beta3
kind: ClusterConfiguration
apiServer:
certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
extraArgs:
enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
controllerManager:
extraArgs:
allocate-node-cidrs: "true"
leader-elect: "false"
scheduler:
extraArgs:
leader-elect: "false"
certificatesDir: /var/lib/minikube/certs
clusterName: mk
controlPlaneEndpoint: control-plane.minikube.internal:8443
etcd:
local:
dataDir: /var/lib/minikube/etcd
extraArgs:
proxy-refresh-interval: "70000"
kubernetesVersion: v1.31.0
networking:
dnsDomain: cluster.local
podSubnet: "10.244.0.0/16"
serviceSubnet: 10.96.0.0/12
---
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
authentication:
x509:
clientCAFile: /var/lib/minikube/certs/ca.crt
cgroupDriver: cgroupfs
containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
hairpinMode: hairpin-veth
runtimeRequestTimeout: 15m
clusterDomain: "cluster.local"
# disable disk resource management by default
imageGCHighThresholdPercent: 100
evictionHard:
nodefs.available: "0%"
nodefs.inodesFree: "0%"
imagefs.available: "0%"
failSwapOn: false
staticPodPath: /etc/kubernetes/manifests
---
apiVersion: kubeproxy.config.k8s.io/v1alpha1
kind: KubeProxyConfiguration
clusterCIDR: "10.244.0.0/16"
metricsBindAddress: 0.0.0.0:10249
conntrack:
maxPerCore: 0
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
tcpEstablishedTimeout: 0s
# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
tcpCloseWaitTimeout: 0s
I1026 21:46:05.932013 41789 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.0
I1026 21:46:06.530903 41789 binaries.go:44] Found k8s binaries, skipping transfer
I1026 21:46:06.533501 41789 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
I1026 21:46:06.734807 41789 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (307 bytes)
I1026 21:46:06.963335 41789 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
I1026 21:46:07.203441 41789 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2150 bytes)
I1026 21:46:07.348234 41789 ssh_runner.go:195] Run: grep 192.168.49.2 control-plane.minikube.internal$ /etc/hosts
I1026 21:46:07.370586 41789 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2 control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
I1026 21:46:07.520075 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:46:08.682717 41789 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.162614372s)
I1026 21:46:08.682974 41789 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1026 21:46:08.910669 41789 certs.go:68] Setting up /Users/ofidohubvm/.minikube/profiles/minikube for IP: 192.168.49.2
I1026 21:46:08.910695 41789 certs.go:194] generating shared ca certs ...
I1026 21:46:08.935147 41789 certs.go:226] acquiring lock for ca certs: {Name:mkcb616ed4343ea13583d13d67d06f3fd74c50f8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1026 21:46:09.207064 41789 certs.go:235] skipping valid "minikubeCA" ca cert: /Users/ofidohubvm/.minikube/ca.key
I1026 21:46:09.269448 41789 certs.go:235] skipping valid "proxyClientCA" ca cert: /Users/ofidohubvm/.minikube/proxy-client-ca.key
I1026 21:46:09.269491 41789 certs.go:256] generating profile certs ...
I1026 21:46:09.291240 41789 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /Users/ofidohubvm/.minikube/profiles/minikube/client.key
I1026 21:46:09.325394 41789 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /Users/ofidohubvm/.minikube/profiles/minikube/apiserver.key.7fb57e3c
I1026 21:46:09.337447 41789 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /Users/ofidohubvm/.minikube/profiles/minikube/proxy-client.key
I1026 21:46:09.344115 41789 certs.go:484] found cert: /Users/ofidohubvm/.minikube/certs/ca-key.pem (1679 bytes)
I1026 21:46:09.348751 41789 certs.go:484] found cert: /Users/ofidohubvm/.minikube/certs/ca.pem (1090 bytes)
I1026 21:46:09.352750 41789 certs.go:484] found cert: /Users/ofidohubvm/.minikube/certs/cert.pem (1131 bytes)
I1026 21:46:09.353268 41789 certs.go:484] found cert: /Users/ofidohubvm/.minikube/certs/key.pem (1679 bytes)
I1026 21:46:10.365896 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
I1026 21:46:10.957447 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
I1026 21:46:11.885877 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
I1026 21:46:12.433462 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
I1026 21:46:12.714811 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/profiles/minikube/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1411 bytes)
I1026 21:46:12.971341 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/profiles/minikube/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
I1026 21:46:13.523526 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/profiles/minikube/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
I1026 21:46:13.858237 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/profiles/minikube/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
I1026 21:46:14.137311 41789 ssh_runner.go:362] scp /Users/ofidohubvm/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
I1026 21:46:14.296791 41789 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (740 bytes)
I1026 21:46:14.637757 41789 ssh_runner.go:195] Run: openssl version
I1026 21:46:15.639004 41789 ssh_runner.go:235] Completed: openssl version: (1.001044728s)
I1026 21:46:15.640185 41789 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
I1026 21:46:16.055394 41789 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
I1026 21:46:16.084414 41789 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Oct 24 22:40 /usr/share/ca-certificates/minikubeCA.pem
I1026 21:46:16.084716 41789 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
I1026 21:46:16.124270 41789 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
I1026 21:46:16.243940 41789 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
I1026 21:46:16.297535 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
I1026 21:46:16.337126 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
I1026 21:46:16.476968 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
I1026 21:46:16.565001 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
I1026 21:46:16.747622 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
I1026 21:46:16.872291 41789 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
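Each openssl x509 -checkend 86400 call above asks one question: will this certificate still be valid 86400 seconds (24 hours) from now? If not, the certificate has to be regenerated. The Go sketch below performs the equivalent check with crypto/x509 instead of openssl; the expiresWithin helper name and the single hard-coded path are illustrative.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

// expiresWithin reports whether the PEM certificate at path expires within duration d,
// mirroring what "openssl x509 -checkend <seconds>" verifies.
func expiresWithin(path string, d time.Duration) (bool, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return false, err
	}
	block, _ := pem.Decode(data)
	if block == nil {
		return false, fmt.Errorf("no PEM data in %s", path)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return false, err
	}
	return time.Now().Add(d).After(cert.NotAfter), nil
}

func main() {
	// Same 24h window as the -checkend 86400 calls in the log; path from the log.
	soon, err := expiresWithin("/var/lib/minikube/certs/apiserver-kubelet-client.crt", 24*time.Hour)
	if err != nil {
		fmt.Println("check failed:", err)
		return
	}
	if soon {
		fmt.Println("certificate expires within 24h: regenerate it")
	} else {
		fmt.Println("certificate is still valid beyond 24h")
	}
}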
I1026 21:46:16.981482 41789 kubeadm.go:392] StartCluster: {Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.45@sha256:81df288595202a317b1a4dc2506ca2e4ed5f22373c19a441b88cfbf4b9867c85 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.0 ClusterName:minikube Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.0 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[dashboard:true default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/Users:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
I1026 21:46:17.036031 41789 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
I1026 21:46:18.858970 41789 ssh_runner.go:235] Completed: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}: (1.822868999s)
I1026 21:46:18.860173 41789 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
I1026 21:46:19.140845 41789 kubeadm.go:408] found existing configuration files, will attempt cluster restart
I1026 21:46:19.149284 41789 kubeadm.go:593] restartPrimaryControlPlane start ...
I1026 21:46:19.149694 41789 ssh_runner.go:195] Run: sudo test -d /data/minikube
I1026 21:46:19.471292 41789 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
stdout:
stderr:
I1026 21:46:19.471970 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "8443/tcp") 0).HostPort}}'" minikube
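The docker container inspect call above extracts the host port Docker publishes for the container's 8443/tcp endpoint, which is the address the kubeconfig entry has to point at. A small Go sketch of the same lookup, assuming a local Docker CLI and the container name from the log:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// hostPortFor8443 asks Docker which host port forwards to the container's 8443/tcp,
// using the same Go-template filter as the logged inspect command.
func hostPortFor8443(container string) (string, error) {
	out, err := exec.Command("docker", "container", "inspect", "-f",
		`{{(index (index .NetworkSettings.Ports "8443/tcp") 0).HostPort}}`, container).Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	port, err := hostPortFor8443("minikube")
	if err != nil {
		fmt.Println("inspect failed:", err)
		return
	}
	fmt.Println("apiserver reachable at https://127.0.0.1:" + port)
}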
I1026 21:46:19.672338 41789 kubeconfig.go:47] verify endpoint returned: get endpoint: "minikube" does not appear in /Users/ofidohubvm/.kube/config
I1026 21:46:19.674988 41789 kubeconfig.go:62] /Users/ofidohubvm/.kube/config needs updating (will repair): [kubeconfig missing "minikube" cluster setting kubeconfig missing "minikube" context setting]
I1026 21:46:19.681855 41789 lock.go:35] WriteFile acquiring /Users/ofidohubvm/.kube/config: {Name:mkaedac47e01bbe58cae15af0bafd068b00af33e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1026 21:46:20.845980 41789 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
I1026 21:46:20.907654 41789 kubeadm.go:630] The running cluster does not require reconfiguration: 127.0.0.1
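The "does not require reconfiguration" decision above comes from diffing the existing /var/tmp/minikube/kubeadm.yaml against the freshly generated kubeadm.yaml.new: when the two match, the running control plane is left alone. The Go sketch below models that check; treating diff exit status 1 as "files differ" is standard diff behaviour, while the needsReconfig helper and the printed messages are illustrative, not minikube's own code.

package main

import (
	"fmt"
	"os/exec"
)

// needsReconfig runs "diff -u old new": exit 0 means identical, exit 1 means the
// generated config has drifted from the one the cluster was started with.
func needsReconfig(oldPath, newPath string) (bool, error) {
	err := exec.Command("diff", "-u", oldPath, newPath).Run()
	if err == nil {
		return false, nil
	}
	if ee, ok := err.(*exec.ExitError); ok && ee.ExitCode() == 1 {
		return true, nil
	}
	return false, err // a real failure (missing file, etc.), not a difference
}

func main() {
	changed, err := needsReconfig("/var/tmp/minikube/kubeadm.yaml", "/var/tmp/minikube/kubeadm.yaml.new")
	if err != nil {
		fmt.Println("diff failed:", err)
		return
	}
	if changed {
		fmt.Println("config drift detected: control plane would be reconfigured")
	} else {
		fmt.Println("running cluster does not require reconfiguration")
	}
}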
I1026 21:46:20.962429 41789 kubeadm.go:597] duration metric: took 1.759902387s to restartPrimaryControlPlane
I1026 21:46:20.962901 41789 kubeadm.go:394] duration metric: took 3.982838568s to StartCluster
I1026 21:46:20.969444 41789 settings.go:142] acquiring lock: {Name:mk8822567fee0c78aaf4cf60d3959fbb141359b1 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1026 21:46:20.972636 41789 settings.go:150] Updating kubeconfig: /Users/ofidohubvm/.kube/config
I1026 21:46:21.012778 41789 lock.go:35] WriteFile acquiring /Users/ofidohubvm/.kube/config: {Name:mkaedac47e01bbe58cae15af0bafd068b00af33e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I1026 21:46:21.016330 41789 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.0 ContainerRuntime:docker ControlPlane:true Worker:true}
I1026 21:46:21.017519 41789 config.go:182] Loaded profile config "minikube": Driver=docker, ContainerRuntime=docker, KubernetesVersion=v1.31.0
I1026 21:46:21.017427 41789 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:true default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
I1026 21:46:21.018588 41789 addons.go:69] Setting dashboard=true in profile "minikube"
I1026 21:46:21.018591 41789 addons.go:69] Setting storage-provisioner=true in profile "minikube"
I1026 21:46:21.018822 41789 addons.go:234] Setting addon dashboard=true in "minikube"
W1026 21:46:21.018833 41789 addons.go:243] addon dashboard should already be in state true
I1026 21:46:21.019031 41789 addons.go:234] Setting addon storage-provisioner=true in "minikube"
W1026 21:46:21.019041 41789 addons.go:243] addon storage-provisioner should already be in state true
I1026 21:46:21.026228 41789 host.go:66] Checking if "minikube" exists ...
I1026 21:46:21.026188 41789 host.go:66] Checking if "minikube" exists ...
I1026 21:46:21.026544 41789 out.go:177] 🔎 Verifying Kubernetes components...
I1026 21:46:21.027487 41789 addons.go:69] Setting default-storageclass=true in profile "minikube"
I1026 21:46:21.029566 41789 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "minikube"
I1026 21:46:21.042858 41789 ssh_runner.go:195] Run: sudo systemctl daemon-reload
I1026 21:46:21.081167 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:46:21.081850 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:46:21.081981 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:46:21.346803 41789 out.go:177] ▪ Using image gcr.io/k8s-minikube/storage-provisioner:v5
I1026 21:46:21.370891 41789 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:46:21.370913 41789 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
I1026 21:46:21.371157 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:46:21.371269 41789 addons.go:234] Setting addon default-storageclass=true in "minikube"
W1026 21:46:21.371282 41789 addons.go:243] addon default-storageclass should already be in state true
I1026 21:46:21.371308 41789 host.go:66] Checking if "minikube" exists ...
I1026 21:46:21.377651 41789 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I1026 21:46:21.426256 41789 out.go:177] ▪ Using image docker.io/kubernetesui/dashboard:v2.7.0
I1026 21:46:21.453670 41789 out.go:177] ▪ Using image docker.io/kubernetesui/metrics-scraper:v1.0.8
I1026 21:46:21.467122 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-ns.yaml
I1026 21:46:21.467138 41789 ssh_runner.go:362] scp dashboard/dashboard-ns.yaml --> /etc/kubernetes/addons/dashboard-ns.yaml (759 bytes)
I1026 21:46:21.468277 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:46:21.560906 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:46:21.568147 41789 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
I1026 21:46:21.568166 41789 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
I1026 21:46:21.568993 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I1026 21:46:21.571344 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:46:21.671961 41789 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:53491 SSHKeyPath:/Users/ofidohubvm/.minikube/machines/minikube/id_rsa Username:docker}
I1026 21:46:22.170785 41789 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.127893838s)
I1026 21:46:22.171046 41789 ssh_runner.go:195] Run: sudo systemctl start kubelet
I1026 21:46:22.536271 41789 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "8443/tcp") 0).HostPort}}'" minikube
I1026 21:46:22.638812 41789 api_server.go:52] waiting for apiserver process to appear ...
I1026 21:46:22.667643 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
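api_server.go is now waiting for the kube-apiserver process to appear, which it does by repeating the pgrep command above until it succeeds. A minimal Go sketch of that polling loop follows; the two-second interval and two-minute timeout are assumptions, only the pgrep pattern is taken from the log.

package main

import (
	"fmt"
	"os/exec"
	"time"
)

// waitForAPIServer polls the same pgrep pattern as the log until a matching
// kube-apiserver process exists, or gives up after the timeout.
func waitForAPIServer(timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		// pgrep exits 0 as soon as a matching process is found.
		if exec.Command("sudo", "pgrep", "-xnf", "kube-apiserver.*minikube.*").Run() == nil {
			return nil
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("kube-apiserver did not appear within %v", timeout)
}

func main() {
	if err := waitForAPIServer(2 * time.Minute); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("apiserver process is up")
}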
I1026 21:46:22.835567 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrole.yaml
I1026 21:46:22.835581 41789 ssh_runner.go:362] scp dashboard/dashboard-clusterrole.yaml --> /etc/kubernetes/addons/dashboard-clusterrole.yaml (1001 bytes)
I1026 21:46:22.977334 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:46:23.019080 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:46:23.157497 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml
I1026 21:46:23.157513 41789 ssh_runner.go:362] scp dashboard/dashboard-clusterrolebinding.yaml --> /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml (1018 bytes)
I1026 21:46:23.169239 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:46:23.492502 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-configmap.yaml
I1026 21:46:23.492534 41789 ssh_runner.go:362] scp dashboard/dashboard-configmap.yaml --> /etc/kubernetes/addons/dashboard-configmap.yaml (837 bytes)
I1026 21:46:23.821414 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-dp.yaml
I1026 21:46:23.821432 41789 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/dashboard-dp.yaml (4288 bytes)
I1026 21:46:24.086037 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-role.yaml
I1026 21:46:24.086053 41789 ssh_runner.go:362] scp dashboard/dashboard-role.yaml --> /etc/kubernetes/addons/dashboard-role.yaml (1724 bytes)
I1026 21:46:24.592969 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-rolebinding.yaml
I1026 21:46:24.593080 41789 ssh_runner.go:362] scp dashboard/dashboard-rolebinding.yaml --> /etc/kubernetes/addons/dashboard-rolebinding.yaml (1046 bytes)
I1026 21:46:24.700850 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-sa.yaml
I1026 21:46:24.700872 41789 ssh_runner.go:362] scp dashboard/dashboard-sa.yaml --> /etc/kubernetes/addons/dashboard-sa.yaml (837 bytes)
I1026 21:46:24.834242 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-secret.yaml
I1026 21:46:24.834263 41789 ssh_runner.go:362] scp dashboard/dashboard-secret.yaml --> /etc/kubernetes/addons/dashboard-secret.yaml (1389 bytes)
I1026 21:46:25.114723 41789 addons.go:431] installing /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:46:25.114738 41789 ssh_runner.go:362] scp dashboard/dashboard-svc.yaml --> /etc/kubernetes/addons/dashboard-svc.yaml (1294 bytes)
I1026 21:46:25.254930 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:46:52.471873 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (29.452632479s)
I1026 21:46:52.471828 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (29.493326791s)
I1026 21:46:52.472130 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (29.302750535s)
W1026 21:46:52.472200 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
W1026 21:46:52.472249 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:52.472935 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:46:52.473963 41789 retry.go:31] will retry after 153.082786ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:52.473992 41789 retry.go:31] will retry after 366.853928ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:52.590825 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (27.335740035s)
W1026 21:46:52.590911 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:52.591512 41789 retry.go:31] will retry after 337.216451ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:52.628274 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:46:52.841974 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:46:52.953295 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:46:54.586206 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (2.113073182s)
I1026 21:46:54.586830 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:46:58.091935 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (5.462965151s)
W1026 21:46:58.092446 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.093021 41789 retry.go:31] will retry after 325.023671ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.096285 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (5.142207518s)
W1026 21:46:58.097233 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.097322 41789 retry.go:31] will retry after 496.274512ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.102988 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (3.516090779s)
I1026 21:46:58.102821 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (5.260801511s)
W1026 21:46:58.103615 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.103666 41789 retry.go:31] will retry after 357.744859ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:46:58.106006 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:46:58.206217 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:46:58.419174 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:46:58.461949 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:46:58.595073 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:46:58.667510 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:02.613858 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (4.194411862s)
W1026 21:47:02.614578 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:02.614896 41789 retry.go:31] will retry after 747.093376ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:02.755541 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (4.293195749s)
W1026 21:47:02.755716 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:02.755792 41789 retry.go:31] will retry after 485.256991ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:02.758725 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (4.091160924s)
I1026 21:47:02.759280 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:02.760168 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (4.163443182s)
W1026 21:47:02.760261 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:02.760300 41789 retry.go:31] will retry after 767.787786ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:03.190820 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:03.339508 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:47:03.386367 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:47:03.590469 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:47:05.635562 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (2.444556875s)
I1026 21:47:05.636381 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:11.227689 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (7.887939657s)
I1026 21:47:11.233590 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (7.842518592s)
W1026 21:47:11.234741 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
W1026 21:47:11.234608 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:11.235566 41789 retry.go:31] will retry after 422.16724ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:11.236261 41789 retry.go:31] will retry after 469.354087ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:11.237164 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (7.645327955s)
W1026 21:47:11.237656 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:11.237885 41789 retry.go:31] will retry after 982.368048ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:11.239532 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (5.603026484s)
I1026 21:47:11.275078 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:11.664201 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:47:11.738411 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:47:12.221389 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:47:12.821893 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (1.546607617s)
I1026 21:47:12.823662 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:18.118489 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (6.379883604s)
W1026 21:47:18.118914 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:18.119185 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (6.454817351s)
I1026 21:47:18.119194 41789 retry.go:31] will retry after 1.178875566s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
W1026 21:47:18.119297 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:18.119341 41789 retry.go:31] will retry after 1.371568573s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:18.121906 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (5.900319902s)
W1026 21:47:18.122126 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:18.122182 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (5.298482285s)
I1026 21:47:18.122351 41789 retry.go:31] will retry after 1.314315524s: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout:
stderr:
error validating "/etc/kubernetes/addons/dashboard-ns.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrole.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-clusterrolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-configmap.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-dp.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-role.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-rolebinding.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-sa.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-secret.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
error validating "/etc/kubernetes/addons/dashboard-svc.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
I1026 21:47:18.123126 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:18.472316 41789 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I1026 21:47:19.299794 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml
I1026 21:47:19.437762 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I1026 21:47:19.492289 41789 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml
I1026 21:47:21.782219 41789 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (3.309498241s)
I1026 21:47:21.961964 41789 ssh_runner.go:195] Run: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}
I1026 21:48:06.697581 41789 ssh_runner.go:235] Completed: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: (44.735415241s)
I1026 21:48:06.729957 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: (47.176857798s)
I1026 21:48:06.730122 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (47.232495393s)
I1026 21:48:06.729904 41789 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: (47.368171212s)
I1026 21:48:06.729594 41789 logs.go:276] 2 containers: [35d9f41e5931 b8a4e3c30a15]
I1026 21:48:06.735117 41789 ssh_runner.go:195] Run: docker ps -a --filter=name=k8s_etcd --format={{.ID}}
W1026 21:48:06.793041 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storageclass.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storageclass.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
W1026 21:48:06.793240 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/storage-provisioner.yaml: Process exited with status 1
stdout:
stderr:
error: error validating "/etc/kubernetes/addons/storage-provisioner.yaml": error validating data: failed to download openapi: Get "https://localhost:8443/openapi/v2?timeout=32s": dial tcp [::1]:8443: connect: connection refused; if you choose to ignore these errors, turn validation off with --validate=false
W1026 21:48:06.793407 41789 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.0/kubectl apply --force -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: Process exited with status 1
stdout: