*
* ==> Audit <==
* |---------|----------------------------------------------------|----------|-------------------------------|---------|---------------------|---------------------|
| Command | Args | Profile | User | Version | Start Time | End Time |
|---------|----------------------------------------------------|----------|-------------------------------|---------|---------------------|---------------------|
| kubectl | get namespaces | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 12:09 WAT | 04 Mar 24 12:12 WAT |
| kubectl | get namespaces | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 12:12 WAT | 04 Mar 24 12:12 WAT |
| help | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 12:17 WAT | 04 Mar 24 12:17 WAT |
| start | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 14:17 WAT | 04 Mar 24 14:18 WAT |
| kubectl | get pods | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 16:50 WAT | 04 Mar 24 16:50 WAT |
| kubectl | get pods | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:29 WAT | 04 Mar 24 17:29 WAT |
| kubectl | describe | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:29 WAT | |
| | jx-boot-4b3a30e3-c4e3-4ec6-8ac5-0d7c3206434f-g7wg2 | | | | | |
| kubectl | get pods | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:30 WAT | 04 Mar 24 17:30 WAT |
| kubectl | describe | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:30 WAT | |
| | jx-boot-4b3a30e3-c4e3-4ec6-8ac5-0d7c3206434f-g7wg2 | | | | | |
| kubectl | describe | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:30 WAT | |
| | jx-boot-4b3a30e3-c4e3-4ec6-8ac5-0d7c3206434f-g7wg2 | | | | | |
| kubectl | describe pod | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:31 WAT | 04 Mar 24 17:31 WAT |
| | jx-boot-4b3a30e3-c4e3-4ec6-8ac5-0d7c3206434f-g7wg2 | | | | | |
| kubectl | describe pod | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:33 WAT | |
| | jx-boot-4b3a30e3-c4e3-4ec6-8ac5-0d7c3206434f-g7wg2 | | | | | |
| kubectl | get pods | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:33 WAT | 04 Mar 24 17:33 WAT |
| stop | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 04 Mar 24 17:35 WAT | 04 Mar 24 17:36 WAT |
| start | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 14:35 WAT | |
| start | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 14:36 WAT | 05 Mar 24 14:37 WAT |
| kubectl | -- get pods -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 14:38 WAT | 05 Mar 24 14:38 WAT |
| start | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 15:10 WAT | 05 Mar 24 15:11 WAT |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 16:08 WAT | 05 Mar 24 16:08 WAT |
| delete | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:05 WAT | 05 Mar 24 17:05 WAT |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:06 WAT | 05 Mar 24 17:13 WAT |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:14 WAT | 05 Mar 24 17:14 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:15 WAT | 05 Mar 24 17:15 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:16 WAT | 05 Mar 24 17:16 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:17 WAT | 05 Mar 24 17:17 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:18 WAT | 05 Mar 24 17:18 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:18 WAT | 05 Mar 24 17:18 WAT |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:27 WAT | 05 Mar 24 17:27 WAT |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:33 WAT | 05 Mar 24 17:33 WAT |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:40 WAT | |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:46 WAT | 05 Mar 24 17:46 WAT |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:46 WAT | |
| delete | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 05 Mar 24 17:49 WAT | 05 Mar 24 17:49 WAT |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 09:29 WAT | 06 Mar 24 09:37 WAT |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 09:39 WAT | 06 Mar 24 09:39 WAT |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:01 WAT | 06 Mar 24 10:01 WAT |
| kubectl | logs jx-git-operator | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:02 WAT | |
| | jx-git-operator-5cb6998cc5-9s7hg | | | | | |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:03 WAT | |
| kubectl | get pods | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:07 WAT | 06 Mar 24 10:07 WAT |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:07 WAT | 06 Mar 24 10:07 WAT |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:08 WAT | 06 Mar 24 10:08 WAT |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:08 WAT | |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:10 WAT | 06 Mar 24 10:10 WAT |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:10 WAT | 06 Mar 24 10:24 WAT |
| kubectl | logs pod | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:24 WAT | |
| | jx-boot-d4960258-2c87-4b3a-87df-b044dae40519 | | | | | |
| kubectl | logs | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:24 WAT | |
| | jx-boot-d4960258-2c87-4b3a-87df-b044dae40519 | | | | | |
| kubectl | get pods -- -A | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:25 WAT | 06 Mar 24 10:25 WAT |
| kubectl | describe pod | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:25 WAT | 06 Mar 24 10:25 WAT |
| | jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s | | | | | |
| kubectl | describe pod | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:26 WAT | 06 Mar 24 10:26 WAT |
| | jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s | | | | | |
| ssh | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 10:26 WAT | |
| delete | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:22 WAT | 06 Mar 24 14:22 WAT |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:23 WAT | |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:24 WAT | |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:25 WAT | |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| delete | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:25 WAT | 06 Mar 24 14:25 WAT |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:26 WAT | 06 Mar 24 14:27 WAT |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| ip | | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:27 WAT | 06 Mar 24 14:27 WAT |
| kubectl | cluster-info -- | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:33 WAT | |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:35 WAT | |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
| start | --cpus 4 --memory | minikube | DESKTOP-NVF9D9Q\Administrator | v1.32.0 | 06 Mar 24 14:36 WAT | |
| | 6048 --disk-size=100g | | | | | |
| | --addons=ingress | | | | | |
| | --kubernetes-version=1.24 | | | | | |
|---------|----------------------------------------------------|----------|-------------------------------|---------|---------------------|---------------------|
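
For reference, the start invocation recorded in the audit rows above, with its wrapped Args column reassembled onto one line:

  minikube start --cpus 4 --memory 6048 --disk-size=100g --addons=ingress --kubernetes-version=1.24

Here --memory 6048 is in megabytes, and --kubernetes-version=1.24 resolves to the newest 1.24 patch release (v1.24.17 in the Last Start log below, which notes the patch version was unspecified).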
*
* ==> Last Start <==
* Log file created at: 2024/03/06 14:36:13
Running on machine: DESKTOP-NVF9D9Q
Binary: Built with gc go1.21.3 for windows/amd64
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
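Reading the first entry below against this format: in "I0306 14:36:13.298834 3968 out.go:296]", I is the severity (Info; W/E/F would be Warning/Error/Fatal), 0306 is the date (March 06), 14:36:13.298834 the timestamp, 3968 the thread id, and out.go:296 the source file and line that emitted the message.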
I0306 14:36:13.298834 3968 out.go:296] Setting OutFile to fd 84 ...
I0306 14:36:13.299831 3968 out.go:343] TERM=,COLORTERM=, which probably does not support color
I0306 14:36:13.299831 3968 out.go:309] Setting ErrFile to fd 88...
I0306 14:36:13.299831 3968 out.go:343] TERM=,COLORTERM=, which probably does not support color
I0306 14:36:13.315225 3968 out.go:303] Setting JSON to false
I0306 14:36:13.319168 3968 start.go:128] hostinfo: {"hostname":"DESKTOP-NVF9D9Q","uptime":86815,"bootTime":1709645358,"procs":289,"os":"windows","platform":"Microsoft Windows 10 Pro","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.4046 Build 19045.4046","kernelVersion":"10.0.19045.4046 Build 19045.4046","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a2cec842-37d4-4b9f-ad7e-86d9a4510d55"}
W0306 14:36:13.319168 3968 start.go:136] gopshost.Virtualization returned error: not implemented yet
I0306 14:36:13.320805 3968 out.go:177] * minikube v1.32.0 on Microsoft Windows 10 Pro 10.0.19045.4046 Build 19045.4046
I0306 14:36:13.322973 3968 notify.go:220] Checking for updates...
I0306 14:36:13.323554 3968 config.go:182] Loaded profile config "minikube": Driver=docker, ContainerRuntime=docker, KubernetesVersion=v1.24.17
I0306 14:36:13.325288 3968 out.go:177] Using Kubernetes 1.24.17 since patch version was unspecified
I0306 14:36:13.326862 3968 out.go:177] * Kubernetes 1.28.3 is now available. If you would like to upgrade, specify: --kubernetes-version=v1.28.3
I0306 14:36:13.328507 3968 driver.go:378] Setting default libvirt URI to qemu:///system
I0306 14:36:13.486795 3968 docker.go:122] docker version: linux-25.0.2:Docker Desktop 4.27.1 (136059)
I0306 14:36:13.492340 3968 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0306 14:36:13.840507 3968 info.go:266] docker info: {ID:4ab225e4-7bba-41a2-acb2-3a6266852d00 Containers:5 ContainersRunning:1 ContainersPaused:0 ContainersStopped:4 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:65 OomKillDisable:true NGoroutines:93 SystemTime:2024-03-06 13:36:13.800109994 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:14 KernelVersion:5.15.133.1-microsoft-standard-WSL2 OperatingSystem:Docker Desktop OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:8260661248 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy:http.docker.internal:3128 HTTPSProxy:http.docker.internal:3128 NoProxy:hubproxy.docker.internal Name:docker-desktop Labels:[] ExperimentalBuild:false ServerVersion:25.0.2 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae07eda36dd25f8a1b98dfbf587313b99c0190bb Expected:ae07eda36dd25f8a1b98dfbf587313b99c0190bb} RuncCommit:{ID:v1.1.12-0-g51d5e94 Expected:v1.1.12-0-g51d5e94} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=seccomp,profile=unconfined] ProductLicense: Warnings:[WARNING: No blkio throttle.read_bps_device support WARNING: No blkio throttle.write_bps_device support WARNING: No blkio throttle.read_iops_device support WARNING: No blkio throttle.write_iops_device support WARNING: daemon is not using the default seccomp profile] ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:C:\Program Files\Docker\cli-plugins\docker-buildx.exe SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.12.1-desktop.4] map[Name:compose Path:C:\Program Files\Docker\cli-plugins\docker-compose.exe SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.24.3-desktop.1] map[Name:debug Path:C:\Program Files\Docker\cli-plugins\docker-debug.exe SchemaVersion:0.1.0 ShortDescription:Get a shell into any image or container. Vendor:Docker Inc. Version:0.0.22] map[Name:dev Path:C:\Program Files\Docker\cli-plugins\docker-dev.exe SchemaVersion:0.1.0 ShortDescription:Docker Dev Environments Vendor:Docker Inc. Version:v0.1.0] map[Name:extension Path:C:\Program Files\Docker\cli-plugins\docker-extension.exe SchemaVersion:0.1.0 ShortDescription:Manages Docker extensions Vendor:Docker Inc. Version:v0.2.21] map[Name:feedback Path:C:\Program Files\Docker\cli-plugins\docker-feedback.exe SchemaVersion:0.1.0 ShortDescription:Provide feedback, right in your terminal! Vendor:Docker Inc. 
Version:v1.0.4] map[Name:init Path:C:\Program Files\Docker\cli-plugins\docker-init.exe SchemaVersion:0.1.0 ShortDescription:Creates Docker-related starter files for your project Vendor:Docker Inc. Version:v1.0.0] map[Name:sbom Path:C:\Program Files\Docker\cli-plugins\docker-sbom.exe SchemaVersion:0.1.0 ShortDescription:View the packaged-based Software Bill Of Materials (SBOM) for an image URL:https://github.com/docker/sbom-cli-plugin Vendor:Anchore Inc. Version:0.6.0] map[Name:scout Path:C:\Program Files\Docker\cli-plugins\docker-scout.exe SchemaVersion:0.1.0 ShortDescription:Docker Scout Vendor:Docker Inc. Version:v1.3.0]] Warnings:<nil>}}
I0306 14:36:13.844641 3968 out.go:177] * Using the docker driver based on existing profile
I0306 14:36:13.846800 3968 start.go:298] selected driver: docker
I0306 14:36:13.846800 3968 start.go:902] validating driver "docker" against &{Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 Memory:6048 CPUs:4 DiskSize:102400 VMDriver: Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:0 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.17 ClusterName:minikube Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.24.17 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true ingress:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\Administrator:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 AutoPauseInterval:1m0s GPUs:}
I0306 14:36:13.846800 3968 start.go:913] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
I0306 14:36:13.894687 3968 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0306 14:36:14.242885 3968 info.go:266] docker info: {ID:4ab225e4-7bba-41a2-acb2-3a6266852d00 Containers:5 ContainersRunning:1 ContainersPaused:0 ContainersStopped:4 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:65 OomKillDisable:true NGoroutines:93 SystemTime:2024-03-06 13:36:14.201966749 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:14 KernelVersion:5.15.133.1-microsoft-standard-WSL2 OperatingSystem:Docker Desktop OSType:linux Architecture:x86_64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:8 MemTotal:8260661248 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy:http.docker.internal:3128 HTTPSProxy:http.docker.internal:3128 NoProxy:hubproxy.docker.internal Name:docker-desktop Labels:[] ExperimentalBuild:false ServerVersion:25.0.2 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:ae07eda36dd25f8a1b98dfbf587313b99c0190bb Expected:ae07eda36dd25f8a1b98dfbf587313b99c0190bb} RuncCommit:{ID:v1.1.12-0-g51d5e94 Expected:v1.1.12-0-g51d5e94} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=seccomp,profile=unconfined] ProductLicense: Warnings:[WARNING: No blkio throttle.read_bps_device support WARNING: No blkio throttle.write_bps_device support WARNING: No blkio throttle.read_iops_device support WARNING: No blkio throttle.write_iops_device support WARNING: daemon is not using the default seccomp profile] ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:C:\Program Files\Docker\cli-plugins\docker-buildx.exe SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.12.1-desktop.4] map[Name:compose Path:C:\Program Files\Docker\cli-plugins\docker-compose.exe SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.24.3-desktop.1] map[Name:debug Path:C:\Program Files\Docker\cli-plugins\docker-debug.exe SchemaVersion:0.1.0 ShortDescription:Get a shell into any image or container. Vendor:Docker Inc. Version:0.0.22] map[Name:dev Path:C:\Program Files\Docker\cli-plugins\docker-dev.exe SchemaVersion:0.1.0 ShortDescription:Docker Dev Environments Vendor:Docker Inc. Version:v0.1.0] map[Name:extension Path:C:\Program Files\Docker\cli-plugins\docker-extension.exe SchemaVersion:0.1.0 ShortDescription:Manages Docker extensions Vendor:Docker Inc. Version:v0.2.21] map[Name:feedback Path:C:\Program Files\Docker\cli-plugins\docker-feedback.exe SchemaVersion:0.1.0 ShortDescription:Provide feedback, right in your terminal! Vendor:Docker Inc. 
Version:v1.0.4] map[Name:init Path:C:\Program Files\Docker\cli-plugins\docker-init.exe SchemaVersion:0.1.0 ShortDescription:Creates Docker-related starter files for your project Vendor:Docker Inc. Version:v1.0.0] map[Name:sbom Path:C:\Program Files\Docker\cli-plugins\docker-sbom.exe SchemaVersion:0.1.0 ShortDescription:View the packaged-based Software Bill Of Materials (SBOM) for an image URL:https://github.com/docker/sbom-cli-plugin Vendor:Anchore Inc. Version:0.6.0] map[Name:scout Path:C:\Program Files\Docker\cli-plugins\docker-scout.exe SchemaVersion:0.1.0 ShortDescription:Docker Scout Vendor:Docker Inc. Version:v1.3.0]] Warnings:<nil>}}
I0306 14:36:14.243417 3968 cni.go:84] Creating CNI manager for ""
I0306 14:36:14.243417 3968 cni.go:158] "docker" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
I0306 14:36:14.243417 3968 start_flags.go:323] config:
{Name:minikube KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 Memory:6048 CPUs:4 DiskSize:102400 VMDriver: Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:0 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.17 ClusterName:minikube Namespace:default APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI: NodeIP: NodePort:8443 NodeName:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.24.17 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true ingress:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\Administrator:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 AutoPauseInterval:1m0s GPUs:}
I0306 14:36:14.247187 3968 out.go:177] * Starting control plane node minikube in cluster minikube
I0306 14:36:14.248878 3968 cache.go:121] Beginning downloading kic base image for docker with docker
I0306 14:36:14.249988 3968 out.go:177] * Pulling base image ...
I0306 14:36:14.251695 3968 preload.go:132] Checking if preload exists for k8s version v1.24.17 and runtime docker
I0306 14:36:14.251695 3968 image.go:79] Checking for gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local docker daemon
I0306 14:36:14.251695 3968 preload.go:148] Found local preload: C:\Users\Administrator\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.24.17-docker-overlay2-amd64.tar.lz4
I0306 14:36:14.251695 3968 cache.go:56] Caching tarball of preloaded images
I0306 14:36:14.252262 3968 preload.go:174] Found C:\Users\Administrator\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.24.17-docker-overlay2-amd64.tar.lz4 in cache, skipping download
I0306 14:36:14.252262 3968 cache.go:59] Finished verifying existence of preloaded tar for v1.24.17 on docker
I0306 14:36:14.252789 3968 profile.go:148] Saving config to C:\Users\Administrator\.minikube\profiles\minikube\config.json ...
I0306 14:36:14.411587 3968 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 to local cache
I0306 14:36:14.411587 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.412152 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.412152 3968 image.go:63] Checking for gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local cache directory
I0306 14:36:14.412152 3968 image.go:66] Found gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local cache directory, skipping pull
I0306 14:36:14.412152 3968 image.go:105] gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 exists in cache, skipping pull
I0306 14:36:14.412152 3968 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 as a tarball
I0306 14:36:14.412152 3968 cache.go:162] Loading gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 from local cache
I0306 14:36:14.412152 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.412724 3968 cache.go:168] failed to download gcr.io/k8s-minikube/kicbase:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0, will try fallback image if available: tarball: unexpected EOF
I0306 14:36:14.412724 3968 image.go:79] Checking for docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local docker daemon
I0306 14:36:14.567323 3968 cache.go:149] Downloading docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 to local cache
I0306 14:36:14.567323 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.567323 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.567323 3968 image.go:63] Checking for docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local cache directory
I0306 14:36:14.567323 3968 image.go:66] Found docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 in local cache directory, skipping pull
I0306 14:36:14.567323 3968 image.go:105] docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 exists in cache, skipping pull
I0306 14:36:14.567323 3968 cache.go:152] successfully saved docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 as a tarball
I0306 14:36:14.567323 3968 cache.go:162] Loading docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0 from local cache
I0306 14:36:14.567323 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42@sha256_d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0.tar
I0306 14:36:14.567854 3968 cache.go:168] failed to download docker.io/kicbase/stable:v0.0.42@sha256:d35ac07dfda971cabee05e0deca8aeac772f885a5348e1a0c0b0a36db20fcfc0, will try fallback image if available: tarball: unexpected EOF
I0306 14:36:14.567854 3968 image.go:79] Checking for gcr.io/k8s-minikube/kicbase:v0.0.42 in local docker daemon
I0306 14:36:14.736902 3968 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase:v0.0.42 to local cache
I0306 14:36:14.736902 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42.tar
I0306 14:36:14.736902 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42.tar
I0306 14:36:14.736902 3968 image.go:63] Checking for gcr.io/k8s-minikube/kicbase:v0.0.42 in local cache directory
I0306 14:36:14.736902 3968 image.go:66] Found gcr.io/k8s-minikube/kicbase:v0.0.42 in local cache directory, skipping pull
I0306 14:36:14.736902 3968 image.go:105] gcr.io/k8s-minikube/kicbase:v0.0.42 exists in cache, skipping pull
I0306 14:36:14.737458 3968 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase:v0.0.42 as a tarball
I0306 14:36:14.737458 3968 cache.go:162] Loading gcr.io/k8s-minikube/kicbase:v0.0.42 from local cache
I0306 14:36:14.737458 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\kicbase_v0.0.42.tar
I0306 14:36:14.737458 3968 cache.go:168] failed to download gcr.io/k8s-minikube/kicbase:v0.0.42, will try fallback image if available: tarball: unexpected EOF
I0306 14:36:14.737458 3968 image.go:79] Checking for docker.io/kicbase/stable:v0.0.42 in local docker daemon
I0306 14:36:14.906541 3968 cache.go:149] Downloading docker.io/kicbase/stable:v0.0.42 to local cache
I0306 14:36:14.906541 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42.tar
I0306 14:36:14.906541 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42.tar
I0306 14:36:14.906541 3968 image.go:63] Checking for docker.io/kicbase/stable:v0.0.42 in local cache directory
I0306 14:36:14.907093 3968 image.go:66] Found docker.io/kicbase/stable:v0.0.42 in local cache directory, skipping pull
I0306 14:36:14.907093 3968 image.go:105] docker.io/kicbase/stable:v0.0.42 exists in cache, skipping pull
I0306 14:36:14.907093 3968 cache.go:152] successfully saved docker.io/kicbase/stable:v0.0.42 as a tarball
I0306 14:36:14.907093 3968 cache.go:162] Loading docker.io/kicbase/stable:v0.0.42 from local cache
I0306 14:36:14.907093 3968 localpath.go:146] windows sanitize: C:\Users\Administrator\.minikube\cache\kic\amd64\stable:v0.0.42.tar -> C:\Users\Administrator\.minikube\cache\kic\amd64\stable_v0.0.42.tar
I0306 14:36:14.907093 3968 cache.go:168] failed to download docker.io/kicbase/stable:v0.0.42, will try fallback image if available: tarball: unexpected EOF
E0306 14:36:14.907093 3968 cache.go:189] Error downloading kic artifacts: failed to download kic base image or any fallback image
I0306 14:36:14.907659 3968 cache.go:194] Successfully downloaded all kic artifacts
I0306 14:36:14.907659 3968 start.go:365] acquiring machines lock for minikube: {Name:mk3f259f80712de9f83f91ad0f12658ee47ef5bf Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I0306 14:36:14.907659 3968 start.go:369] acquired machines lock for "minikube" in 0s
I0306 14:36:14.907659 3968 start.go:96] Skipping create...Using existing machine configuration
I0306 14:36:14.907659 3968 fix.go:54] fixHost starting:
I0306 14:36:14.924123 3968 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I0306 14:36:15.074713 3968 fix.go:102] recreateIfNeeded on minikube: state=Running err=<nil>
W0306 14:36:15.074713 3968 fix.go:128] unexpected machine state, will restart: <nil>
I0306 14:36:15.078049 3968 out.go:177] * Updating the running docker "minikube" container ...
I0306 14:36:15.079739 3968 machine.go:88] provisioning docker machine ...
I0306 14:36:15.079739 3968 ubuntu.go:169] provisioning hostname "minikube"
I0306 14:36:15.088108 3968 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I0306 14:36:15.236598 3968 main.go:141] libmachine: Using SSH client type: native
I0306 14:36:15.237123 3968 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13947e0] 0x1397320 <nil> [] 0s} 127.0.0.1 55080 <nil> <nil>}
I0306 14:36:15.237123 3968 main.go:141] libmachine: About to run SSH command:
sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
I0306 14:36:15.399986 3968 main.go:141] libmachine: SSH cmd err, output: Process exited with status 1: tee: /etc/hostname: Read-only file system
minikube
I0306 14:36:15.399986 3968 machine.go:91] provisioned docker machine in 320.2461ms
I0306 14:36:15.409235 3968 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
I0306 14:36:15.416414 3968 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I0306 14:36:15.582671 3968 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:55080 SSHKeyPath:C:\Users\Administrator\.minikube\machines\minikube\id_rsa Username:docker}
I0306 14:36:15.710459 3968 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
I0306 14:36:15.719194 3968 fix.go:56] fixHost completed within 811.5342ms
I0306 14:36:15.719194 3968 start.go:83] releasing machines lock for "minikube", held for 811.5342ms
W0306 14:36:15.719194 3968 start.go:691] error starting host: provision: ssh command error:
command : sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
err : Process exited with status 1
output : tee: /etc/hostname: Read-only file system
minikube
W0306 14:36:15.719194 3968 out.go:239] ! StartHost failed, but will try again: provision: ssh command error:
command : sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
err : Process exited with status 1
output : tee: /etc/hostname: Read-only file system
minikube
I0306 14:36:15.719194 3968 start.go:706] Will try again in 5 seconds ...
I0306 14:36:20.732272 3968 start.go:365] acquiring machines lock for minikube: {Name:mk3f259f80712de9f83f91ad0f12658ee47ef5bf Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
I0306 14:36:20.732272 3968 start.go:369] acquired machines lock for "minikube" in 0s
I0306 14:36:20.732821 3968 start.go:96] Skipping create...Using existing machine configuration
I0306 14:36:20.732821 3968 fix.go:54] fixHost starting:
I0306 14:36:20.760191 3968 cli_runner.go:164] Run: docker container inspect minikube --format={{.State.Status}}
I0306 14:36:20.917051 3968 fix.go:102] recreateIfNeeded on minikube: state=Running err=<nil>
W0306 14:36:20.917051 3968 fix.go:128] unexpected machine state, will restart: <nil>
I0306 14:36:20.920005 3968 out.go:177] * Updating the running docker "minikube" container ...
I0306 14:36:20.922499 3968 machine.go:88] provisioning docker machine ...
I0306 14:36:20.922499 3968 ubuntu.go:169] provisioning hostname "minikube"
I0306 14:36:20.929456 3968 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I0306 14:36:21.076373 3968 main.go:141] libmachine: Using SSH client type: native
I0306 14:36:21.076934 3968 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13947e0] 0x1397320 <nil> [] 0s} 127.0.0.1 55080 <nil> <nil>}
I0306 14:36:21.076934 3968 main.go:141] libmachine: About to run SSH command:
sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
I0306 14:36:21.249989 3968 main.go:141] libmachine: SSH cmd err, output: Process exited with status 1: tee: /etc/hostname: Read-only file system
minikube
I0306 14:36:21.249989 3968 machine.go:91] provisioned docker machine in 327.4894ms
I0306 14:36:21.259375 3968 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
I0306 14:36:21.267152 3968 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" minikube
I0306 14:36:21.408763 3968 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:55080 SSHKeyPath:C:\Users\Administrator\.minikube\machines\minikube\id_rsa Username:docker}
I0306 14:36:21.518038 3968 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
I0306 14:36:21.528674 3968 fix.go:56] fixHost completed within 795.8535ms
I0306 14:36:21.528674 3968 start.go:83] releasing machines lock for "minikube", held for 796.4026ms
W0306 14:36:21.528674 3968 out.go:239] * Failed to start docker container. Running "minikube delete" may fix it: provision: ssh command error:
command : sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
err : Process exited with status 1
output : tee: /etc/hostname: Read-only file system
minikube
I0306 14:36:21.532088 3968 out.go:177]
W0306 14:36:21.534328 3968 out.go:239] X Exiting due to GUEST_PROVISION: error provisioning guest: Failed to start host: provision: ssh command error:
command : sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname
err : Process exited with status 1
output : tee: /etc/hostname: Read-only file system
minikube
W0306 14:36:21.534328 3968 out.go:239] *
W0306 14:36:21.535951 3968 out.go:239] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
│ │
│ * If the above advice does not help, please let us know: │
│ https://github.com/kubernetes/minikube/issues/new/choose │
│ │
│ * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue. │
│ │
╰─────────────────────────────────────────────────────────────────────────────────────────────╯
I0306 14:36:21.538231 3968 out.go:177]
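
The GUEST_PROVISION exit above is a write failure, not an SSH failure: the provisioner's bootstrap command (sudo hostname minikube && echo "minikube" | sudo tee /etc/hostname) reaches the node but cannot write because the guest filesystem is mounted read-only. A minimal probe from the host, assuming the "minikube" node container is still running under the docker driver shown in the audit:

  # fails with "Read-only file system" while the node rootfs is read-only
  minikube ssh "sudo touch /etc/hostname"

The same write test could be run with docker exec against the "minikube" container directly.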
*
* ==> Docker <==
* Mar 06 13:36:11 minikube dockerd[1244]: time="2024-03-06T13:36:11.612240060Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:11 minikube dockerd[1244]: time="2024-03-06T13:36:11.611741958Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:11 minikube dockerd[1244]: time="2024-03-06T13:36:11.611961259Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.651002531Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.693156865Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.712141510Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.713645890Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.714096584Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.714182783Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.718008731Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.719549911Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.719844207Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.720026504Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.720904192Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.724332346Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.727056810Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.727072910Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.727802300Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.728662088Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.729735874Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.729893472Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:21 minikube dockerd[1244]: time="2024-03-06T13:36:21.730232667Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.796790853Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.844958177Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.845132576Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.846315764Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.846317164Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.848486142Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.848624841Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.848657541Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.848744540Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.848845239Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.849003037Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860416825Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860431725Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860454024Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860472024Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860504424Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860541623Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.860781221Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:31 minikube dockerd[1244]: time="2024-03-06T13:36:31.880137030Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.911036554Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.947247028Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.973077296Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.974050687Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.974198286Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.974677182Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.974789481Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.976232668Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.976735663Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.977017461Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.978027052Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.978668546Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.978995143Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.979318440Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.981099324Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.985514584Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.988860754Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.994158606Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
Mar 06 13:36:41 minikube dockerd[1244]: time="2024-03-06T13:36:41.998826565Z" level=warning msg="failed to get endpoint_count map from store: open /var/lib/docker/network/files/local-kv.db: read-only file system"
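
These dockerd warnings point at the same root cause as the provisioning failure above: the node's writable layer, including /var/lib/docker, has gone read-only, so the daemon cannot open its network store (local-kv.db). The recovery path the log itself suggests ("Running \"minikube delete\" may fix it") is to discard the node and recreate it; a sketch, reusing the flags from the audit table:

  minikube delete
  minikube start --cpus 4 --memory 6048 --disk-size=100g --addons=ingress --kubernetes-version=1.24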
*
* ==> container status <==
* /bin/bash: line 1: 13869 Bus error sudo `which crictl || echo crictl` ps -a
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
11ccd35e8694 6e38f40d628d "/storage-provisioner" 8 minutes ago Up 8 minutes k8s_storage-provisioner_storage-provisioner_kube-system_1a4a2d9c-ed67-4528-a2a7-489836ac0b3a_2
1c552243640e ghcr.io/jenkins-x/jx-git-operator "/bin/sh -c 'echo 'n…" 8 minutes ago Up 8 minutes k8s_jx-git-operator_jx-git-operator-5cb6998cc5-9s7hg_jx-git-operator_ce9c19f2-e31b-4995-b284-abf3744b0fa8_11
65b0011e0168 a4ca41631cc7 "/coredns -conf /etc…" 8 minutes ago Up 8 minutes k8s_coredns_coredns-57575c5f89-l67hq_kube-system_283d354d-8900-4335-9805-4ca0969aabd5_1
dc4316d1b743 registry.k8s.io/pause:3.7 "/pause" 8 minutes ago Up 8 minutes k8s_POD_jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s_jx-git-operator_9486e5c9-3e59-4a8d-801c-f93969178abd_1
3b29fa3bd946 registry.k8s.io/ingress-nginx/controller "/usr/bin/dumb-init …" 8 minutes ago Up 8 minutes k8s_controller_ingress-nginx-controller-6c8c79c589-rxr8l_ingress-nginx_1023dfe7-dc37-431e-87eb-6a9dd223e9f9_1
4028e7afedbd registry.k8s.io/pause:3.7 "/pause" 8 minutes ago Up 8 minutes k8s_POD_jx-git-operator-5cb6998cc5-9s7hg_jx-git-operator_ce9c19f2-e31b-4995-b284-abf3744b0fa8_1
b2f3e37b7d72 registry.k8s.io/pause:3.7 "/pause" 8 minutes ago Up 8 minutes k8s_POD_coredns-57575c5f89-l67hq_kube-system_283d354d-8900-4335-9805-4ca0969aabd5_2
a5b0b822e0b4 registry.k8s.io/pause:3.7 "/pause" 8 minutes ago Up 8 minutes k8s_POD_ingress-nginx-controller-6c8c79c589-rxr8l_ingress-nginx_1023dfe7-dc37-431e-87eb-6a9dd223e9f9_1
bbb3450488ce e9870aeebd11 "/usr/local/bin/kube…" 9 minutes ago Up 9 minutes k8s_kube-proxy_kube-proxy-zdc5k_kube-system_ca65e257-6da5-4cd8-9ea0-83422818229c_1
1c817bd69f8e 6e38f40d628d "/storage-provisioner" 9 minutes ago Exited (1) 8 minutes ago k8s_storage-provisioner_storage-provisioner_kube-system_1a4a2d9c-ed67-4528-a2a7-489836ac0b3a_1
b50a4fcd35c7 registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_kube-proxy-zdc5k_kube-system_ca65e257-6da5-4cd8-9ea0-83422818229c_1
f260032b7359 registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_storage-provisioner_kube-system_1a4a2d9c-ed67-4528-a2a7-489836ac0b3a_1
489cea8e1948 1d5992d42449 "kube-scheduler --au…" 9 minutes ago Up 9 minutes k8s_kube-scheduler_kube-scheduler-minikube_kube-system_9d164c4397a8b73bf91d9e7b3f0a3a24_1
570e9ffa8942 fce326961ae2 "etcd --advertise-cl…" 9 minutes ago Up 9 minutes k8s_etcd_etcd-minikube_kube-system_367a235a0a591dcca7fb45c93337a4bb_1
0a451fb36b7e 72f30124dd6d "kube-controller-man…" 9 minutes ago Up 9 minutes k8s_kube-controller-manager_kube-controller-manager-minikube_kube-system_efa585a769c6ad7258884dcdaed6e07e_1
2183b60e1f82 4f1c5007cffa "kube-apiserver --ad…" 9 minutes ago Up 9 minutes k8s_kube-apiserver_kube-apiserver-minikube_kube-system_23abaa28abb68e28acf76909897fb6ec_1
5abf7c41fec1 registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_kube-scheduler-minikube_kube-system_9d164c4397a8b73bf91d9e7b3f0a3a24_1
50dec3348e7f registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_kube-controller-manager-minikube_kube-system_efa585a769c6ad7258884dcdaed6e07e_1
171f204e1878 registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_kube-apiserver-minikube_kube-system_23abaa28abb68e28acf76909897fb6ec_1
6ab7cc2f9aee registry.k8s.io/pause:3.7 "/pause" 9 minutes ago Up 9 minutes k8s_POD_etcd-minikube_kube-system_367a235a0a591dcca7fb45c93337a4bb_1
0b020ea48fde 5e8e65609840 "/bin/sh -c 'echo 'n…" 4 hours ago Exited (1) 4 hours ago k8s_jx-git-operator_jx-git-operator-5cb6998cc5-9s7hg_jx-git-operator_ce9c19f2-e31b-4995-b284-abf3744b0fa8_10
5b70bd974efd registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_jx-git-operator-5cb6998cc5-9s7hg_jx-git-operator_ce9c19f2-e31b-4995-b284-abf3744b0fa8_0
613c2a8c24aa registry.k8s.io/ingress-nginx/controller "/usr/bin/dumb-init …" 5 hours ago Exited (255) 9 minutes ago k8s_controller_ingress-nginx-controller-6c8c79c589-rxr8l_ingress-nginx_1023dfe7-dc37-431e-87eb-6a9dd223e9f9_0
fcdab67170cd registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_ingress-nginx-controller-6c8c79c589-rxr8l_ingress-nginx_1023dfe7-dc37-431e-87eb-6a9dd223e9f9_0
ab42ceaf129b a4ca41631cc7 "/coredns -conf /etc…" 5 hours ago Exited (0) 4 hours ago k8s_coredns_coredns-57575c5f89-l67hq_kube-system_283d354d-8900-4335-9805-4ca0969aabd5_0
114ae7bf8846 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_coredns-57575c5f89-l67hq_kube-system_283d354d-8900-4335-9805-4ca0969aabd5_1
19198b9869f0 e9870aeebd11 "/usr/local/bin/kube…" 5 hours ago Exited (2) 4 hours ago k8s_kube-proxy_kube-proxy-zdc5k_kube-system_ca65e257-6da5-4cd8-9ea0-83422818229c_0
c5d49704e984 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_kube-proxy-zdc5k_kube-system_ca65e257-6da5-4cd8-9ea0-83422818229c_0
01a143e7f0f7 fce326961ae2 "etcd --advertise-cl…" 5 hours ago Exited (0) 4 hours ago k8s_etcd_etcd-minikube_kube-system_367a235a0a591dcca7fb45c93337a4bb_0
7ef75b22dab1 72f30124dd6d "kube-controller-man…" 5 hours ago Exited (2) 4 hours ago k8s_kube-controller-manager_kube-controller-manager-minikube_kube-system_efa585a769c6ad7258884dcdaed6e07e_0
d81266b000c6 1d5992d42449 "kube-scheduler --au…" 5 hours ago Exited (255) 9 minutes ago k8s_kube-scheduler_kube-scheduler-minikube_kube-system_9d164c4397a8b73bf91d9e7b3f0a3a24_0
09d1d453a222 4f1c5007cffa "kube-apiserver --ad…" 5 hours ago Exited (255) 9 minutes ago k8s_kube-apiserver_kube-apiserver-minikube_kube-system_23abaa28abb68e28acf76909897fb6ec_0
d03da200fc49 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_kube-apiserver-minikube_kube-system_23abaa28abb68e28acf76909897fb6ec_0
419bd7ba7998 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_etcd-minikube_kube-system_367a235a0a591dcca7fb45c93337a4bb_0
4021876b6895 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_kube-scheduler-minikube_kube-system_9d164c4397a8b73bf91d9e7b3f0a3a24_0
c41cb7f0d4c1 registry.k8s.io/pause:3.7 "/pause" 5 hours ago Exited (0) 4 hours ago k8s_POD_kube-controller-manager-minikube_kube-system_efa585a769c6ad7258884dcdaed6e07e_0
*
* ==> controller_ingress [3b29fa3bd946] <==
* I0306 13:27:49.186234 7 event.go:298] Event(v1.ObjectReference{Kind:"ConfigMap", Namespace:"ingress-nginx", Name:"ingress-nginx-controller", UID:"b6cff7e1-acae-4c9c-96fa-8d17ba2bd0ec", APIVersion:"v1", ResourceVersion:"400", FieldPath:""}): type: 'Normal' reason: 'CREATE' ConfigMap ingress-nginx/ingress-nginx-controller
I0306 13:27:49.196994 7 event.go:298] Event(v1.ObjectReference{Kind:"ConfigMap", Namespace:"ingress-nginx", Name:"udp-services", UID:"8a2ebb14-dc06-44e8-b973-f6e514fed897", APIVersion:"v1", ResourceVersion:"402", FieldPath:""}): type: 'Normal' reason: 'CREATE' ConfigMap ingress-nginx/udp-services
I0306 13:27:50.357515 7 nginx.go:303] "Starting NGINX process"
I0306 13:27:50.357614 7 leaderelection.go:245] attempting to acquire leader lease ingress-nginx/ingress-nginx-leader...
I0306 13:27:50.357999 7 nginx.go:323] "Starting validation webhook" address=":8443" certPath="/usr/local/certificates/cert" keyPath="/usr/local/certificates/key"
I0306 13:27:50.358244 7 controller.go:190] "Configuration changes detected, backend reload required"
I0306 13:27:50.376784 7 leaderelection.go:255] successfully acquired lease ingress-nginx/ingress-nginx-leader
I0306 13:27:50.376962 7 status.go:84] "New leader elected" identity="ingress-nginx-controller-6c8c79c589-rxr8l"
I0306 13:27:50.381860 7 status.go:219] "POD is not ready" pod="ingress-nginx/ingress-nginx-controller-6c8c79c589-rxr8l" node="minikube"
I0306 13:27:50.452362 7 controller.go:210] "Backend successfully reloaded"
I0306 13:27:50.452526 7 controller.go:221] "Initial sync, sleeping for 1 second"
I0306 13:27:50.452597 7 event.go:298] Event(v1.ObjectReference{Kind:"Pod", Namespace:"ingress-nginx", Name:"ingress-nginx-controller-6c8c79c589-rxr8l", UID:"1023dfe7-dc37-431e-87eb-6a9dd223e9f9", APIVersion:"v1", ResourceVersion:"4147", FieldPath:""}): type: 'Normal' reason: 'RELOAD' NGINX reload triggered due to a change in configuration
E0306 13:29:58.790725 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": context deadline exceeded
I0306 13:29:58.790808 7 leaderelection.go:280] failed to renew lease ingress-nginx/ingress-nginx-leader: timed out waiting for the condition
I0306 13:29:58.791034 7 leaderelection.go:245] attempting to acquire leader lease ingress-nginx/ingress-nginx-leader...
E0306 13:29:58.791101 7 status.go:104] "error running poll" err="timed out waiting for the condition"
E0306 13:30:50.388244 7 queue.go:131] "requeuing" err="the server was unable to return a response in the time allotted, but may still be processing the request (get pods)" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:30:58.792793 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: the server was unable to return a response in the time allotted, but may still be processing the request (get leases.coordination.k8s.io ingress-nginx-leader)
E0306 13:31:16.978658 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: apiserver is shutting down
E0306 13:31:24.825025 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:31:29.768726 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused - error from a previous attempt: http2: server sent GOAWAY and closed the connection; LastStreamID=143, ErrCode=NO_ERROR, debug=\"\"" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:29.779790 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:29.801094 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:29.841989 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:29.923562 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:30.084879 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:30.407137 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:31.048706 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:32.329991 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:34.891095 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:37.394099 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:31:40.012689 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:31:47.838339 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:31:50.254682 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:32:03.975765 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:32:10.736638 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:32:17.907712 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:32:26.515819 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:32:36.151221 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:32:51.697239 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:32:52.192317 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:33:06.378058 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:33:18.054737 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:33:33.626388 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:33:43.938327 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:33:52.632548 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:34:04.322219 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:34:13.617227 7 queue.go:131] "requeuing" err="Get \"https://10.96.0.1:443/api/v1/namespaces/ingress-nginx/pods?labelSelector=app.kubernetes.io%!F(MISSING)component%!D(MISSING)controller%!C(MISSING)app.kubernetes.io%!F(MISSING)instance%!D(MISSING)ingress-nginx%!C(MISSING)app.kubernetes.io%!F(MISSING)name%!D(MISSING)ingress-nginx%!C(MISSING)gcp-auth-skip-secret%!D(MISSING)true%!C(MISSING)pod-template-hash%!D(MISSING)6c8c79c589\": dial tcp 10.96.0.1:443: connect: connection refused" key="&ObjectMeta{Name:sync status,GenerateName:,Namespace:,SelfLink:,UID:,ResourceVersion:,Generation:0,CreationTimestamp:0001-01-01 00:00:00 +0000 UTC,DeletionTimestamp:<nil>,DeletionGracePeriodSeconds:nil,Labels:map[string]string{},Annotations:map[string]string{},OwnerReferences:[]OwnerReference{},Finalizers:[],ManagedFields:[]ManagedFieldsEntry{},}"
E0306 13:34:17.714165 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:34:33.381137 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:34:41.785165 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:34:55.729227 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:35:06.566529 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:35:17.376528 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:35:27.268599 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:35:42.988700 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:35:51.337540 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:36:07.390873 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:36:22.862807 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
E0306 13:36:38.068246 7 leaderelection.go:327] error retrieving resource lock ingress-nginx/ingress-nginx-leader: Get "https://10.96.0.1:443/apis/coordination.k8s.io/v1/namespaces/ingress-nginx/leases/ingress-nginx-leader": dial tcp 10.96.0.1:443: connect: connection refused
*
* ==> controller_ingress [613c2a8c24aa] <==
*
* ==> coredns [65b0011e0168] <==
* .:53
[INFO] plugin/reload: Running configuration MD5 = 512bc0e06a520fa44f35dc15de10fdd6
CoreDNS-1.8.6
linux/amd64, go1.17.1, 13a9191
[INFO] 127.0.0.1:45954 - 19197 "HINFO IN 6878486810444113369.6953337118571526147. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.134110664s
[INFO] 10.244.0.10:51833 - 43674 "A IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000246258s
[INFO] 10.244.0.10:51833 - 53415 "AAAA IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000342532s
[INFO] 10.244.0.10:47433 - 31630 "AAAA IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000082521s
[INFO] 10.244.0.10:47433 - 49293 "A IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000129906s
[INFO] 10.244.0.10:60982 - 62037 "AAAA IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000161127s
[INFO] 10.244.0.10:60982 - 43607 "A IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000205097s
[INFO] 10.244.0.10:55922 - 49269 "A IN github.com. udp 28 false 512" NOERROR qr,rd,ra 54 0.06516616s
[INFO] 10.244.0.10:55922 - 53363 "AAAA IN github.com. udp 28 false 512" NOERROR qr,rd,ra 28 0.145028181s
[INFO] 10.244.0.10:49067 - 59191 "A IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000277004s
[INFO] 10.244.0.10:49067 - 47674 "AAAA IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000434705s
[INFO] 10.244.0.10:32980 - 62158 "AAAA IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000336003s
[INFO] 10.244.0.10:32980 - 35522 "A IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000471806s
[INFO] 10.244.0.10:39634 - 6841 "A IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000376004s
[INFO] 10.244.0.10:39634 - 14013 "AAAA IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.001232214s
[INFO] 10.244.0.10:58545 - 5711 "A IN github.com. udp 28 false 512" NOERROR qr,aa,rd,ra 54 0.000256703s
[INFO] 10.244.0.10:58545 - 49739 "AAAA IN github.com. udp 28 false 512" NOERROR qr,rd,ra 28 0.142843071s
[INFO] 10.244.0.10:46254 - 26521 "AAAA IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000529677s
[INFO] 10.244.0.10:46254 - 11877 "A IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000679171s
[INFO] 10.244.0.10:52499 - 51208 "AAAA IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000307387s
[INFO] 10.244.0.10:52499 - 3598 "A IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000508278s
[INFO] 10.244.0.10:38704 - 36042 "AAAA IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000245889s
[INFO] 10.244.0.10:38704 - 64567 "A IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000393683s
[INFO] 10.244.0.10:42021 - 56292 "A IN github.com. udp 28 false 512" NOERROR qr,rd,ra 54 0.006101636s
[INFO] 10.244.0.10:42021 - 64739 "AAAA IN github.com. udp 28 false 512" NOERROR qr,rd,ra 28 0.429040888s
[INFO] 10.244.0.10:56200 - 41387 "AAAA IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000216778s
[INFO] 10.244.0.10:56200 - 61871 "A IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000338066s
[INFO] 10.244.0.10:50104 - 9345 "AAAA IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000236976s
[INFO] 10.244.0.10:50104 - 14469 "A IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000402559s
[INFO] 10.244.0.10:49119 - 21215 "AAAA IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000117988s
[INFO] 10.244.0.10:49119 - 48858 "A IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000230476s
[INFO] 10.244.0.10:44645 - 14111 "A IN github.com. udp 28 false 512" NOERROR qr,rd,ra 54 0.111007593s
[INFO] 10.244.0.10:44645 - 4634 "AAAA IN github.com. udp 28 false 512" NOERROR qr,rd,ra 28 1.410077874s
[INFO] 10.244.0.10:52143 - 56046 "AAAA IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000389583s
[INFO] 10.244.0.10:52143 - 13291 "A IN github.com.jx-git-operator.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000508378s
[INFO] 10.244.0.10:39887 - 22787 "A IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.000325886s
[INFO] 10.244.0.10:39887 - 37745 "AAAA IN github.com.svc.cluster.local. udp 46 false 512" NXDOMAIN qr,aa,rd 139 0.004252913s
[INFO] 10.244.0.10:58356 - 9695 "A IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.000398282s
[INFO] 10.244.0.10:58356 - 23248 "AAAA IN github.com.cluster.local. udp 42 false 512" NXDOMAIN qr,aa,rd 135 0.004832387s
[INFO] 10.244.0.10:35010 - 31277 "A IN github.com. udp 28 false 512" NOERROR qr,aa,rd,ra 54 0.00022609s
[INFO] 10.244.0.10:35010 - 42529 "AAAA IN github.com. udp 28 false 512" NOERROR qr,rd,ra 28 0.121380653s
*
* ==> coredns [ab42ceaf129b] <==
*
* ==> describe nodes <==
*
* ==> dmesg <==
* sudo: unable to execute /usr/bin/dmesg: Input/output error
*
* ==> etcd [01a143e7f0f7] <==
*
* ==> etcd [570e9ffa8942] <==
*
* ==> kernel <==
*
* ==> kube-apiserver [09d1d453a222] <==
*
* ==> kube-apiserver [2183b60e1f82] <==
*
* ==> kube-controller-manager [0a451fb36b7e] <==
* I0306 13:35:34.864043 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:34.864700 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:34.865095 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:34.865159 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:35:39.866435 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:39.867202 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:39.867685 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:39.867738 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:35:44.868952 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:44.869909 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:44.870488 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:44.870546 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
E0306 13:35:49.175432 1 resource_quota_controller.go:413] failed to discover resources: Get "https://192.168.49.2:8443/api": dial tcp 192.168.49.2:8443: connect: connection refused
W0306 13:35:49.496138 1 garbagecollector.go:749] failed to discover preferred resources: Get "https://192.168.49.2:8443/api": dial tcp 192.168.49.2:8443: connect: connection refused
I0306 13:35:49.871416 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:49.872195 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:49.872671 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:49.872733 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:35:54.874033 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:54.874828 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:54.875179 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:54.875209 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:35:59.876095 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:35:59.876653 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:35:59.876995 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:35:59.877035 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:04.877807 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:04.878611 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:04.879328 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:04.879393 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:09.880782 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:09.881417 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:09.881680 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:09.881713 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:14.882607 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:14.883229 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:14.883620 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:14.883656 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
E0306 13:36:19.176934 1 resource_quota_controller.go:413] failed to discover resources: Get "https://192.168.49.2:8443/api": dial tcp 192.168.49.2:8443: connect: connection refused
W0306 13:36:19.497988 1 garbagecollector.go:749] failed to discover preferred resources: Get "https://192.168.49.2:8443/api": dial tcp 192.168.49.2:8443: connect: connection refused
I0306 13:36:19.884473 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:19.885247 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:19.885555 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:19.885619 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:24.886156 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:24.886707 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:24.887181 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:24.887229 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:29.888197 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:29.888894 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:29.889376 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:29.889425 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:34.890114 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:34.890728 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:34.891145 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:34.891189 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
I0306 13:36:39.891912 1 node_lifecycle_controller.go:1399] Initializing eviction metric for zone:
E0306 13:36:39.892900 1 node_lifecycle_controller.go:1108] Error updating node minikube: Put "https://192.168.49.2:8443/api/v1/nodes/minikube/status": dial tcp 192.168.49.2:8443: connect: connection refused
E0306 13:36:39.893310 1 node_lifecycle_controller.go:802] Failed while getting a Node to retry updating node health. Probably Node minikube was deleted.
E0306 13:36:39.893368 1 node_lifecycle_controller.go:807] Update health of Node '' from Controller error: Get "https://192.168.49.2:8443/api/v1/nodes/minikube": dial tcp 192.168.49.2:8443: connect: connection refused. Skipping - no pods will be evicted.
*
* ==> kube-controller-manager [7ef75b22dab1] <==
*
* ==> kube-proxy [19198b9869f0] <==
*
* ==> kube-proxy [bbb3450488ce] <==
* E0306 13:27:38.761585 1 proxier.go:656] "Failed to read builtin modules file, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" err="open /lib/modules/5.15.133.1-microsoft-standard-WSL2/modules.builtin: no such file or directory" filePath="/lib/modules/5.15.133.1-microsoft-standard-WSL2/modules.builtin"
I0306 13:27:38.764184 1 proxier.go:666] "Failed to load kernel module with modprobe, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" moduleName="ip_vs"
I0306 13:27:38.766380 1 proxier.go:666] "Failed to load kernel module with modprobe, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" moduleName="ip_vs_rr"
I0306 13:27:38.768728 1 proxier.go:666] "Failed to load kernel module with modprobe, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" moduleName="ip_vs_wrr"
I0306 13:27:38.771105 1 proxier.go:666] "Failed to load kernel module with modprobe, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" moduleName="ip_vs_sh"
I0306 13:27:38.774177 1 proxier.go:666] "Failed to load kernel module with modprobe, you can ignore this message when kube-proxy is running inside container without mounting /lib/modules" moduleName="nf_conntrack"
I0306 13:27:38.799973 1 node.go:163] Successfully retrieved node IP: 192.168.49.2
I0306 13:27:38.800046 1 server_others.go:138] "Detected node IP" address="192.168.49.2"
I0306 13:27:38.800086 1 server_others.go:592] "Unknown proxy mode, assuming iptables proxy" proxyMode=""
I0306 13:27:39.136386 1 server_others.go:208] "Using iptables Proxier"
I0306 13:27:39.136475 1 server_others.go:215] "kube-proxy running in dual-stack mode" ipFamily=IPv4
I0306 13:27:39.136496 1 server_others.go:216] "Creating dualStackProxier for iptables"
I0306 13:27:39.136532 1 server_others.go:515] "Detect-local-mode set to ClusterCIDR, but no IPv6 cluster CIDR defined, , defaulting to no-op detect-local for IPv6"
I0306 13:27:39.136564 1 proxier.go:259] "Setting route_localnet=1, use nodePortAddresses to filter loopback addresses for NodePorts to skip it https://issues.k8s.io/90259"
I0306 13:27:39.136913 1 proxier.go:259] "Setting route_localnet=1, use nodePortAddresses to filter loopback addresses for NodePorts to skip it https://issues.k8s.io/90259"
I0306 13:27:39.137311 1 server.go:662] "Version info" version="v1.24.17"
I0306 13:27:39.137368 1 server.go:664] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I0306 13:27:39.138200 1 config.go:444] "Starting node config controller"
I0306 13:27:39.138244 1 shared_informer.go:252] Waiting for caches to sync for node config
I0306 13:27:39.138441 1 config.go:226] "Starting endpoint slice config controller"
I0306 13:27:39.138471 1 shared_informer.go:252] Waiting for caches to sync for endpoint slice config
I0306 13:27:39.138546 1 config.go:317] "Starting service config controller"
I0306 13:27:39.138571 1 shared_informer.go:252] Waiting for caches to sync for service config
I0306 13:27:39.238524 1 shared_informer.go:259] Caches are synced for node config
I0306 13:27:39.238599 1 shared_informer.go:259] Caches are synced for endpoint slice config
I0306 13:27:39.238729 1 shared_informer.go:259] Caches are synced for service config
*
* ==> kube-scheduler [489cea8e1948] <==
* I0306 13:27:28.575953 1 serving.go:348] Generated self-signed cert in-memory
I0306 13:27:31.827801 1 server.go:147] "Starting Kubernetes Scheduler" version="v1.24.17"
I0306 13:27:31.827864 1 server.go:149] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I0306 13:27:31.840905 1 secure_serving.go:210] Serving securely on 127.0.0.1:10259
I0306 13:27:31.841388 1 configmap_cafile_content.go:202] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
I0306 13:27:31.841452 1 shared_informer.go:252] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
I0306 13:27:31.841242 1 tlsconfig.go:240] "Starting DynamicServingCertificateController"
I0306 13:27:31.841324 1 requestheader_controller.go:169] Starting RequestHeaderAuthRequestController
I0306 13:27:31.845245 1 shared_informer.go:252] Waiting for caches to sync for RequestHeaderAuthRequestController
I0306 13:27:31.841349 1 configmap_cafile_content.go:202] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
I0306 13:27:31.845301 1 shared_informer.go:252] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
I0306 13:27:31.941922 1 shared_informer.go:259] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
I0306 13:27:31.945392 1 shared_informer.go:259] Caches are synced for RequestHeaderAuthRequestController
I0306 13:27:31.945500 1 shared_informer.go:259] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
*
* ==> kube-scheduler [d81266b000c6] <==
*
* ==> kubelet <==
* Mar 06 13:36:02 minikube kubelet[1891]: E0306 13:36:02.153960 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:03 minikube kubelet[1891]: E0306 13:36:03.567801 1891 kuberuntime_manager.go:905] container &Container{Name:kube-apiserver,Image:registry.k8s.io/kube-apiserver:v1.24.17,Command:[kube-apiserver --advertise-address=192.168.49.2 --allow-privileged=true --authorization-mode=Node,RBAC --client-ca-file=/var/lib/minikube/certs/ca.crt --enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota --enable-bootstrap-token-auth=true --etcd-cafile=/var/lib/minikube/certs/etcd/ca.crt --etcd-certfile=/var/lib/minikube/certs/apiserver-etcd-client.crt --etcd-keyfile=/var/lib/minikube/certs/apiserver-etcd-client.key --etcd-servers=https://127.0.0.1:2379 --kubelet-client-certificate=/var/lib/minikube/certs/apiserver-kubelet-client.crt --kubelet-client-key=/var/lib/minikube/certs/apiserver-kubelet-client.key --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname --proxy-client-cert-file=/var/lib/minikube/certs/front-proxy-client.crt --proxy-client-key-file=/var/lib/minikube/certs/front-proxy-client.key --requestheader-allowed-names=front-proxy-client --requestheader-client-ca-file=/var/lib/minikube/certs/front-proxy-ca.crt --requestheader-extra-headers-prefix=X-Remote-Extra- --requestheader-group-headers=X-Remote-Group --requestheader-username-headers=X-Remote-User --secure-port=8443 --service-account-issuer=https://kubernetes.default.svc.cluster.local --service-account-key-file=/var/lib/minikube/certs/sa.pub --service-account-signing-key-file=/var/lib/minikube/certs/sa.key --service-cluster-ip-range=10.96.0.0/12 --tls-cert-file=/var/lib/minikube/certs/apiserver.crt --tls-private-key-file=/var/lib/minikube/certs/apiserver.key],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{250 -3} {<nil>} 250m DecimalSI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/ssl/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etc-ca-certificates,ReadOnly:true,MountPath:/etc/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:k8s-certs,ReadOnly:true,MountPath:/var/lib/minikube/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-local-share-ca-certificates,ReadOnly:true,MountPath:/usr/local/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-share-ca-certificates,ReadOnly:true,MountPath:/usr/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:15,PeriodSeconds:1,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod kube-apiserver-minikube_kube-system(23abaa28abb68e28acf76909897fb6ec): CreateContainerConfigError: open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system
Mar 06 13:36:03 minikube kubelet[1891]: E0306 13:36:03.567869 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system\"" pod="kube-system/kube-apiserver-minikube" podUID=23abaa28abb68e28acf76909897fb6ec
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.566945 1891 kuberuntime_manager.go:905] container &Container{Name:etcd,Image:registry.k8s.io/etcd:3.5.6-0,Command:[etcd --advertise-client-urls=https://192.168.49.2:2379 --cert-file=/var/lib/minikube/certs/etcd/server.crt --client-cert-auth=true --data-dir=/var/lib/minikube/etcd --experimental-initial-corrupt-check=true --initial-advertise-peer-urls=https://192.168.49.2:2380 --initial-cluster=minikube=https://192.168.49.2:2380 --key-file=/var/lib/minikube/certs/etcd/server.key --listen-client-urls=https://127.0.0.1:2379,https://192.168.49.2:2379 --listen-metrics-urls=http://127.0.0.1:2381 --listen-peer-urls=https://192.168.49.2:2380 --name=minikube --peer-cert-file=/var/lib/minikube/certs/etcd/peer.crt --peer-client-cert-auth=true --peer-key-file=/var/lib/minikube/certs/etcd/peer.key --peer-trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt --proxy-refresh-interval=70000 --snapshot-count=10000 --trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{100 -3} {<nil>} 100m DecimalSI},memory: {{104857600 0} {<nil>} 100Mi BinarySI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etcd-data,ReadOnly:false,MountPath:/var/lib/minikube/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etcd-certs,ReadOnly:false,MountPath:/var/lib/minikube/certs/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod etcd-minikube_kube-system(367a235a0a591dcca7fb45c93337a4bb): CreateContainerConfigError: open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.567011 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"etcd\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system\"" pod="kube-system/etcd-minikube" podUID=367a235a0a591dcca7fb45c93337a4bb
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.843902 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?resourceVersion=0&timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.844445 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.844778 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.845339 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.845784 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:05 minikube kubelet[1891]: E0306 13:36:05.845844 1891 kubelet_node_status.go:447] "Unable to update node status" err="update node status exceeds retry count"
Mar 06 13:36:06 minikube kubelet[1891]: I0306 13:36:06.563207 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:06 minikube kubelet[1891]: E0306 13:36:06.565163 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"git-clone\" with ImagePullBackOff: \"Back-off pulling image \\\"ghcr.io/jenkins-x/jx-boot:3.10.136\\\"\"" pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd
Mar 06 13:36:09 minikube kubelet[1891]: E0306 13:36:09.155502 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:11 minikube kubelet[1891]: I0306 13:36:11.565825 1891 status_manager.go:664] "Failed to get status for pod" podUID=23abaa28abb68e28acf76909897fb6ec pod="kube-system/kube-apiserver-minikube" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-minikube\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:11 minikube kubelet[1891]: I0306 13:36:11.566307 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:11 minikube kubelet[1891]: E0306 13:36:11.970714 1891 event.go:276] Unable to write event: '&v1.Event{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kube-apiserver-minikube.17ba2fff65f5db93", GenerateName:"", Namespace:"kube-system", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), DeletionTimestamp:<nil>, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string(nil), Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ZZZ_DeprecatedClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, InvolvedObject:v1.ObjectReference{Kind:"Pod", Namespace:"kube-system", Name:"kube-apiserver-minikube", UID:"23abaa28abb68e28acf76909897fb6ec", APIVersion:"v1", ResourceVersion:"", FieldPath:"spec.containers{kube-apiserver}"}, Reason:"Unhealthy", Message:"Readiness probe failed: HTTP probe failed with statuscode: 500", Source:v1.EventSource{Component:"kubelet", Host:"minikube"}, FirstTimestamp:time.Date(2024, time.March, 6, 13, 29, 42, 514301843, time.Local), LastTimestamp:time.Date(2024, time.March, 6, 13, 29, 50, 545722985, time.Local), Count:5, Type:"Warning", EventTime:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Series:(*v1.EventSeries)(nil), Action:"", Related:(*v1.ObjectReference)(nil), ReportingController:"", ReportingInstance:""}': 'Patch "https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/events/kube-apiserver-minikube.17ba2fff65f5db93": dial tcp 192.168.49.2:8443: connect: connection refused'(may retry after sleeping)
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.568332 1891 kuberuntime_manager.go:905] container &Container{Name:kube-apiserver,Image:registry.k8s.io/kube-apiserver:v1.24.17,Command:[kube-apiserver --advertise-address=192.168.49.2 --allow-privileged=true --authorization-mode=Node,RBAC --client-ca-file=/var/lib/minikube/certs/ca.crt --enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota --enable-bootstrap-token-auth=true --etcd-cafile=/var/lib/minikube/certs/etcd/ca.crt --etcd-certfile=/var/lib/minikube/certs/apiserver-etcd-client.crt --etcd-keyfile=/var/lib/minikube/certs/apiserver-etcd-client.key --etcd-servers=https://127.0.0.1:2379 --kubelet-client-certificate=/var/lib/minikube/certs/apiserver-kubelet-client.crt --kubelet-client-key=/var/lib/minikube/certs/apiserver-kubelet-client.key --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname --proxy-client-cert-file=/var/lib/minikube/certs/front-proxy-client.crt --proxy-client-key-file=/var/lib/minikube/certs/front-proxy-client.key --requestheader-allowed-names=front-proxy-client --requestheader-client-ca-file=/var/lib/minikube/certs/front-proxy-ca.crt --requestheader-extra-headers-prefix=X-Remote-Extra- --requestheader-group-headers=X-Remote-Group --requestheader-username-headers=X-Remote-User --secure-port=8443 --service-account-issuer=https://kubernetes.default.svc.cluster.local --service-account-key-file=/var/lib/minikube/certs/sa.pub --service-account-signing-key-file=/var/lib/minikube/certs/sa.key --service-cluster-ip-range=10.96.0.0/12 --tls-cert-file=/var/lib/minikube/certs/apiserver.crt --tls-private-key-file=/var/lib/minikube/certs/apiserver.key],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{250 -3} {<nil>} 250m DecimalSI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/ssl/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etc-ca-certificates,ReadOnly:true,MountPath:/etc/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:k8s-certs,ReadOnly:true,MountPath:/var/lib/minikube/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-local-share-ca-certificates,ReadOnly:true,MountPath:/usr/local/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-share-ca-certificates,ReadOnly:true,MountPath:/usr/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8443 
},Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:15,PeriodSeconds:1,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod kube-apiserver-minikube_kube-system(23abaa28abb68e28acf76909897fb6ec): CreateContainerConfigError: open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.568438 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system\"" pod="kube-system/kube-apiserver-minikube" podUID=23abaa28abb68e28acf76909897fb6ec
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.907437 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?resourceVersion=0&timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.907731 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.908034 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.908413 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.908659 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:15 minikube kubelet[1891]: E0306 13:36:15.908700 1891 kubelet_node_status.go:447] "Unable to update node status" err="update node status exceeds retry count"
Mar 06 13:36:16 minikube kubelet[1891]: E0306 13:36:16.156463 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:17 minikube kubelet[1891]: I0306 13:36:17.562223 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:17 minikube kubelet[1891]: E0306 13:36:17.563919 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"git-clone\" with ImagePullBackOff: \"Back-off pulling image \\\"ghcr.io/jenkins-x/jx-boot:3.10.136\\\"\"" pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd
Mar 06 13:36:17 minikube kubelet[1891]: E0306 13:36:17.565868 1891 kuberuntime_manager.go:905] container &Container{Name:etcd,Image:registry.k8s.io/etcd:3.5.6-0,Command:[etcd --advertise-client-urls=https://192.168.49.2:2379 --cert-file=/var/lib/minikube/certs/etcd/server.crt --client-cert-auth=true --data-dir=/var/lib/minikube/etcd --experimental-initial-corrupt-check=true --initial-advertise-peer-urls=https://192.168.49.2:2380 --initial-cluster=minikube=https://192.168.49.2:2380 --key-file=/var/lib/minikube/certs/etcd/server.key --listen-client-urls=https://127.0.0.1:2379,https://192.168.49.2:2379 --listen-metrics-urls=http://127.0.0.1:2381 --listen-peer-urls=https://192.168.49.2:2380 --name=minikube --peer-cert-file=/var/lib/minikube/certs/etcd/peer.crt --peer-client-cert-auth=true --peer-key-file=/var/lib/minikube/certs/etcd/peer.key --peer-trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt --proxy-refresh-interval=70000 --snapshot-count=10000 --trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{100 -3} {<nil>} 100m DecimalSI},memory: {{104857600 0} {<nil>} 100Mi BinarySI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etcd-data,ReadOnly:false,MountPath:/var/lib/minikube/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etcd-certs,ReadOnly:false,MountPath:/var/lib/minikube/certs/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod etcd-minikube_kube-system(367a235a0a591dcca7fb45c93337a4bb): CreateContainerConfigError: open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system
Mar 06 13:36:17 minikube kubelet[1891]: E0306 13:36:17.565947 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"etcd\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system\"" pod="kube-system/etcd-minikube" podUID=367a235a0a591dcca7fb45c93337a4bb
Mar 06 13:36:21 minikube kubelet[1891]: I0306 13:36:21.562198 1891 status_manager.go:664] "Failed to get status for pod" podUID=23abaa28abb68e28acf76909897fb6ec pod="kube-system/kube-apiserver-minikube" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-minikube\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:21 minikube kubelet[1891]: I0306 13:36:21.562438 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:21 minikube kubelet[1891]: E0306 13:36:21.971375 1891 event.go:276] Unable to write event: '&v1.Event{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kube-apiserver-minikube.17ba2fff65f5db93", GenerateName:"", Namespace:"kube-system", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), DeletionTimestamp:<nil>, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string(nil), Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ZZZ_DeprecatedClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, InvolvedObject:v1.ObjectReference{Kind:"Pod", Namespace:"kube-system", Name:"kube-apiserver-minikube", UID:"23abaa28abb68e28acf76909897fb6ec", APIVersion:"v1", ResourceVersion:"", FieldPath:"spec.containers{kube-apiserver}"}, Reason:"Unhealthy", Message:"Readiness probe failed: HTTP probe failed with statuscode: 500", Source:v1.EventSource{Component:"kubelet", Host:"minikube"}, FirstTimestamp:time.Date(2024, time.March, 6, 13, 29, 42, 514301843, time.Local), LastTimestamp:time.Date(2024, time.March, 6, 13, 29, 50, 545722985, time.Local), Count:5, Type:"Warning", EventTime:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Series:(*v1.EventSeries)(nil), Action:"", Related:(*v1.ObjectReference)(nil), ReportingController:"", ReportingInstance:""}': 'Patch "https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/events/kube-apiserver-minikube.17ba2fff65f5db93": dial tcp 192.168.49.2:8443: connect: connection refused'(may retry after sleeping)
Mar 06 13:36:23 minikube kubelet[1891]: E0306 13:36:23.158248 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.000172 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?resourceVersion=0&timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.000435 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.000731 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.001045 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.001294 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:26 minikube kubelet[1891]: E0306 13:36:26.001334 1891 kubelet_node_status.go:447] "Unable to update node status" err="update node status exceeds retry count"
Mar 06 13:36:30 minikube kubelet[1891]: E0306 13:36:30.159133 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:30 minikube kubelet[1891]: E0306 13:36:30.564694 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"git-clone\" with ImagePullBackOff: \"Back-off pulling image \\\"ghcr.io/jenkins-x/jx-boot:3.10.136\\\"\"" pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd
Mar 06 13:36:30 minikube kubelet[1891]: E0306 13:36:30.566376 1891 kuberuntime_manager.go:905] container &Container{Name:kube-apiserver,Image:registry.k8s.io/kube-apiserver:v1.24.17,Command:[kube-apiserver --advertise-address=192.168.49.2 --allow-privileged=true --authorization-mode=Node,RBAC --client-ca-file=/var/lib/minikube/certs/ca.crt --enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota --enable-bootstrap-token-auth=true --etcd-cafile=/var/lib/minikube/certs/etcd/ca.crt --etcd-certfile=/var/lib/minikube/certs/apiserver-etcd-client.crt --etcd-keyfile=/var/lib/minikube/certs/apiserver-etcd-client.key --etcd-servers=https://127.0.0.1:2379 --kubelet-client-certificate=/var/lib/minikube/certs/apiserver-kubelet-client.crt --kubelet-client-key=/var/lib/minikube/certs/apiserver-kubelet-client.key --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname --proxy-client-cert-file=/var/lib/minikube/certs/front-proxy-client.crt --proxy-client-key-file=/var/lib/minikube/certs/front-proxy-client.key --requestheader-allowed-names=front-proxy-client --requestheader-client-ca-file=/var/lib/minikube/certs/front-proxy-ca.crt --requestheader-extra-headers-prefix=X-Remote-Extra- --requestheader-group-headers=X-Remote-Group --requestheader-username-headers=X-Remote-User --secure-port=8443 --service-account-issuer=https://kubernetes.default.svc.cluster.local --service-account-key-file=/var/lib/minikube/certs/sa.pub --service-account-signing-key-file=/var/lib/minikube/certs/sa.key --service-cluster-ip-range=10.96.0.0/12 --tls-cert-file=/var/lib/minikube/certs/apiserver.crt --tls-private-key-file=/var/lib/minikube/certs/apiserver.key],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{250 -3} {<nil>} 250m DecimalSI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/ssl/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etc-ca-certificates,ReadOnly:true,MountPath:/etc/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:k8s-certs,ReadOnly:true,MountPath:/var/lib/minikube/certs,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-local-share-ca-certificates,ReadOnly:true,MountPath:/usr/local/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:usr-share-ca-certificates,ReadOnly:true,MountPath:/usr/share/ca-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8443 
},Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:15,PeriodSeconds:1,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8443 },Host:192.168.49.2,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod kube-apiserver-minikube_kube-system(23abaa28abb68e28acf76909897fb6ec): CreateContainerConfigError: open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system
Mar 06 13:36:30 minikube kubelet[1891]: E0306 13:36:30.566437 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/23abaa28abb68e28acf76909897fb6ec/etc-hosts: read-only file system\"" pod="kube-system/kube-apiserver-minikube" podUID=23abaa28abb68e28acf76909897fb6ec
Mar 06 13:36:31 minikube kubelet[1891]: I0306 13:36:31.563460 1891 status_manager.go:664] "Failed to get status for pod" podUID=23abaa28abb68e28acf76909897fb6ec pod="kube-system/kube-apiserver-minikube" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-minikube\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:31 minikube kubelet[1891]: I0306 13:36:31.564109 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:31 minikube kubelet[1891]: E0306 13:36:31.972874 1891 event.go:276] Unable to write event: '&v1.Event{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kube-apiserver-minikube.17ba2fff65f5db93", GenerateName:"", Namespace:"kube-system", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), DeletionTimestamp:<nil>, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string(nil), Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ZZZ_DeprecatedClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, InvolvedObject:v1.ObjectReference{Kind:"Pod", Namespace:"kube-system", Name:"kube-apiserver-minikube", UID:"23abaa28abb68e28acf76909897fb6ec", APIVersion:"v1", ResourceVersion:"", FieldPath:"spec.containers{kube-apiserver}"}, Reason:"Unhealthy", Message:"Readiness probe failed: HTTP probe failed with statuscode: 500", Source:v1.EventSource{Component:"kubelet", Host:"minikube"}, FirstTimestamp:time.Date(2024, time.March, 6, 13, 29, 42, 514301843, time.Local), LastTimestamp:time.Date(2024, time.March, 6, 13, 29, 50, 545722985, time.Local), Count:5, Type:"Warning", EventTime:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Series:(*v1.EventSeries)(nil), Action:"", Related:(*v1.ObjectReference)(nil), ReportingController:"", ReportingInstance:""}': 'Patch "https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/events/kube-apiserver-minikube.17ba2fff65f5db93": dial tcp 192.168.49.2:8443: connect: connection refused'(may retry after sleeping)
Mar 06 13:36:32 minikube kubelet[1891]: E0306 13:36:32.566735 1891 kuberuntime_manager.go:905] container &Container{Name:etcd,Image:registry.k8s.io/etcd:3.5.6-0,Command:[etcd --advertise-client-urls=https://192.168.49.2:2379 --cert-file=/var/lib/minikube/certs/etcd/server.crt --client-cert-auth=true --data-dir=/var/lib/minikube/etcd --experimental-initial-corrupt-check=true --initial-advertise-peer-urls=https://192.168.49.2:2380 --initial-cluster=minikube=https://192.168.49.2:2380 --key-file=/var/lib/minikube/certs/etcd/server.key --listen-client-urls=https://127.0.0.1:2379,https://192.168.49.2:2379 --listen-metrics-urls=http://127.0.0.1:2381 --listen-peer-urls=https://192.168.49.2:2380 --name=minikube --peer-cert-file=/var/lib/minikube/certs/etcd/peer.crt --peer-client-cert-auth=true --peer-key-file=/var/lib/minikube/certs/etcd/peer.key --peer-trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt --proxy-refresh-interval=70000 --snapshot-count=10000 --trusted-ca-file=/var/lib/minikube/certs/etcd/ca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{100 -3} {<nil>} 100m DecimalSI},memory: {{104857600 0} {<nil>} 100Mi BinarySI},},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etcd-data,ReadOnly:false,MountPath:/var/lib/minikube/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},VolumeMount{Name:etcd-certs,ReadOnly:false,MountPath:/var/lib/minikube/certs/etcd,SubPath:,MountPropagation:nil,SubPathExpr:,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:8,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/health,Port:{0 2381 },Host:127.0.0.1,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:15,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:24,TerminationGracePeriodSeconds:nil,},} start failed in pod etcd-minikube_kube-system(367a235a0a591dcca7fb45c93337a4bb): CreateContainerConfigError: open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system
Mar 06 13:36:32 minikube kubelet[1891]: E0306 13:36:32.566787 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"etcd\" with CreateContainerConfigError: \"open /var/lib/kubelet/pods/367a235a0a591dcca7fb45c93337a4bb/etc-hosts: read-only file system\"" pod="kube-system/etcd-minikube" podUID=367a235a0a591dcca7fb45c93337a4bb
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.331561 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?resourceVersion=0&timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.331939 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.332197 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.332436 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.332805 1891 kubelet_node_status.go:460] "Error updating node status, will retry" err="error getting node \"minikube\": Get \"https://control-plane.minikube.internal:8443/api/v1/nodes/minikube?timeout=10s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:36 minikube kubelet[1891]: E0306 13:36:36.332838 1891 kubelet_node_status.go:447] "Unable to update node status" err="update node status exceeds retry count"
Mar 06 13:36:37 minikube kubelet[1891]: E0306 13:36:37.160770 1891 controller.go:144] failed to ensure lease exists, will retry in 7s, error: Get "https://control-plane.minikube.internal:8443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/minikube?timeout=10s": dial tcp 192.168.49.2:8443: connect: connection refused
Mar 06 13:36:41 minikube kubelet[1891]: I0306 13:36:41.563179 1891 status_manager.go:664] "Failed to get status for pod" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/jx-git-operator/pods/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:41 minikube kubelet[1891]: I0306 13:36:41.563614 1891 status_manager.go:664] "Failed to get status for pod" podUID=23abaa28abb68e28acf76909897fb6ec pod="kube-system/kube-apiserver-minikube" err="Get \"https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-minikube\": dial tcp 192.168.49.2:8443: connect: connection refused"
Mar 06 13:36:41 minikube kubelet[1891]: E0306 13:36:41.973390 1891 event.go:276] Unable to write event: '&v1.Event{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"kube-apiserver-minikube.17ba2fff65f5db93", GenerateName:"", Namespace:"kube-system", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), DeletionTimestamp:<nil>, DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string(nil), Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ZZZ_DeprecatedClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, InvolvedObject:v1.ObjectReference{Kind:"Pod", Namespace:"kube-system", Name:"kube-apiserver-minikube", UID:"23abaa28abb68e28acf76909897fb6ec", APIVersion:"v1", ResourceVersion:"", FieldPath:"spec.containers{kube-apiserver}"}, Reason:"Unhealthy", Message:"Readiness probe failed: HTTP probe failed with statuscode: 500", Source:v1.EventSource{Component:"kubelet", Host:"minikube"}, FirstTimestamp:time.Date(2024, time.March, 6, 13, 29, 42, 514301843, time.Local), LastTimestamp:time.Date(2024, time.March, 6, 13, 29, 50, 545722985, time.Local), Count:5, Type:"Warning", EventTime:time.Date(1, time.January, 1, 0, 0, 0, 0, time.UTC), Series:(*v1.EventSeries)(nil), Action:"", Related:(*v1.ObjectReference)(nil), ReportingController:"", ReportingInstance:""}': 'Patch "https://control-plane.minikube.internal:8443/api/v1/namespaces/kube-system/events/kube-apiserver-minikube.17ba2fff65f5db93": dial tcp 192.168.49.2:8443: connect: connection refused'(may retry after sleeping)
Mar 06 13:36:43 minikube kubelet[1891]: E0306 13:36:43.564963 1891 pod_workers.go:965] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"git-clone\" with ImagePullBackOff: \"Back-off pulling image \\\"ghcr.io/jenkins-x/jx-boot:3.10.136\\\"\"" pod="jx-git-operator/jx-boot-d4960258-2c87-4b3a-87df-b044dae40519-mc28s" podUID=9486e5c9-3e59-4a8d-801c-f93969178abd
*
* ==> storage-provisioner [11ccd35e8694] <==
*
* ==> storage-provisioner [1c817bd69f8e] <==