PR: mkimuram: Move disruptive tests to testsuites and add ones for block volume
Result: FAILURE
Tests: 1 failed / 1692 succeeded
Started: 2019-06-13 01:09
Elapsed: 29m57s
Revision:
Builder: gke-prow-containerd-pool-bigger-170c3937-92f4
Refs: master:4f29960c, 75071:69886d05
pod: d83ca448-8d77-11e9-a90f-0293453c48ce
infra-commit: bf4a71fd9
repo: k8s.io/kubernetes
repo-commit: 926cf4a58d8a98556a0e6ef15cf948ed00f60773
repos: {u'k8s.io/kubernetes': u'master:4f29960cb2058c4f556bf1e5ba49c9a29a501e70,75071:69886d05b4491e50063ab050ab64e493f6959cde'}

Test Failures


k8s.io/kubernetes/test/integration/scheduler TestNodePIDPressure 34s

go test -v k8s.io/kubernetes/test/integration/scheduler -run TestNodePIDPressure$
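To reproduce locally, this integration test needs an etcd reachable at 127.0.0.1:2379 (the endpoint the log lines below connect to). A minimal sketch, assuming a kubernetes/kubernetes checkout at the PR's revision and the repository's standard hack/install-etcd.sh helper and test-integration make target:

    # Sketch only: paths and targets below are assumptions about the standard repo layout.
    ./hack/install-etcd.sh                       # installs the pinned etcd under third_party/
    export PATH="$(pwd)/third_party/etcd:${PATH}"
    make test-integration WHAT=./test/integration/scheduler \
        KUBE_TEST_ARGS="-run TestNodePIDPressure$"

Running the single test through the make target keeps the build flags consistent with CI; the bare go test command above works as well once etcd is on PATH.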
I0613 01:30:38.628062  109680 services.go:33] Network range for service cluster IPs is unspecified. Defaulting to {10.0.0.0 ffffff00}.
I0613 01:30:38.628087  109680 services.go:45] Setting service IP to "10.0.0.1" (read-write).
I0613 01:30:38.628098  109680 master.go:277] Node port range unspecified. Defaulting to 30000-32767.
I0613 01:30:38.628109  109680 master.go:233] Using reconciler: 
I0613 01:30:38.630274  109680 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.630511  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.630582  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.630667  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.630752  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.631505  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.631614  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.631971  109680 store.go:1343] Monitoring podtemplates count at <storage-prefix>//podtemplates
I0613 01:30:38.632141  109680 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.632071  109680 reflector.go:160] Listing and watching *core.PodTemplate from storage/cacher.go:/podtemplates
I0613 01:30:38.633567  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.634579  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.634630  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.634707  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.634793  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.635727  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.635814  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.635983  109680 store.go:1343] Monitoring events count at <storage-prefix>//events
I0613 01:30:38.636028  109680 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.636109  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.636127  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.636166  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.636223  109680 reflector.go:160] Listing and watching *core.Event from storage/cacher.go:/events
I0613 01:30:38.636514  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.637093  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.637227  109680 store.go:1343] Monitoring limitranges count at <storage-prefix>//limitranges
I0613 01:30:38.637267  109680 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.637333  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.637342  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.637374  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.637426  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.637463  109680 reflector.go:160] Listing and watching *core.LimitRange from storage/cacher.go:/limitranges
I0613 01:30:38.637638  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.638060  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.638149  109680 store.go:1343] Monitoring resourcequotas count at <storage-prefix>//resourcequotas
I0613 01:30:38.638316  109680 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.638382  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.638419  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.638453  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.638521  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.638575  109680 reflector.go:160] Listing and watching *core.ResourceQuota from storage/cacher.go:/resourcequotas
I0613 01:30:38.638757  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.638920  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.639122  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.639473  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.639562  109680 store.go:1343] Monitoring secrets count at <storage-prefix>//secrets
I0613 01:30:38.639701  109680 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.639762  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.639772  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.639801  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.639842  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.640330  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.640512  109680 reflector.go:160] Listing and watching *core.Secret from storage/cacher.go:/secrets
I0613 01:30:38.641508  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.641676  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.642917  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.642986  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.643177  109680 store.go:1343] Monitoring persistentvolumes count at <storage-prefix>//persistentvolumes
I0613 01:30:38.643271  109680 reflector.go:160] Listing and watching *core.PersistentVolume from storage/cacher.go:/persistentvolumes
I0613 01:30:38.643510  109680 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.643827  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.643923  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.644008  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.644156  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.644400  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.645583  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.645882  109680 store.go:1343] Monitoring persistentvolumeclaims count at <storage-prefix>//persistentvolumeclaims
I0613 01:30:38.646031  109680 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.646139  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.646157  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.646201  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.646253  109680 reflector.go:160] Listing and watching *core.PersistentVolumeClaim from storage/cacher.go:/persistentvolumeclaims
I0613 01:30:38.646462  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.646792  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.646928  109680 store.go:1343] Monitoring configmaps count at <storage-prefix>//configmaps
I0613 01:30:38.647055  109680 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.647115  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.647125  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.647155  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.647193  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.647222  109680 reflector.go:160] Listing and watching *core.ConfigMap from storage/cacher.go:/configmaps
I0613 01:30:38.647422  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.647644  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.647994  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.648103  109680 store.go:1343] Monitoring namespaces count at <storage-prefix>//namespaces
I0613 01:30:38.648237  109680 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.648309  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.648320  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.648352  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.648416  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.648444  109680 reflector.go:160] Listing and watching *core.Namespace from storage/cacher.go:/namespaces
I0613 01:30:38.648581  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.648852  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.648981  109680 store.go:1343] Monitoring endpoints count at <storage-prefix>//services/endpoints
I0613 01:30:38.649110  109680 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.649169  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.649179  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.649208  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.649274  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.649313  109680 reflector.go:160] Listing and watching *core.Endpoints from storage/cacher.go:/services/endpoints
I0613 01:30:38.649464  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.649485  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.649680  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.649708  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.649803  109680 store.go:1343] Monitoring nodes count at <storage-prefix>//minions
I0613 01:30:38.649849  109680 reflector.go:160] Listing and watching *core.Node from storage/cacher.go:/minions
I0613 01:30:38.649967  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.650036  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.650046  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.650078  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.650110  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.650190  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.650558  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.650771  109680 store.go:1343] Monitoring pods count at <storage-prefix>//pods
I0613 01:30:38.651024  109680 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.651150  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.651169  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.651230  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.651335  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.651404  109680 reflector.go:160] Listing and watching *core.Pod from storage/cacher.go:/pods
I0613 01:30:38.651495  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.651560  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.651819  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.651887  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.651984  109680 store.go:1343] Monitoring serviceaccounts count at <storage-prefix>//serviceaccounts
I0613 01:30:38.652023  109680 reflector.go:160] Listing and watching *core.ServiceAccount from storage/cacher.go:/serviceaccounts
I0613 01:30:38.652116  109680 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.652180  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.652190  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.652246  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.652324  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.653046  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.653140  109680 store.go:1343] Monitoring services count at <storage-prefix>//services/specs
I0613 01:30:38.653166  109680 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.653242  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.653251  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.653290  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.653302  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.653321  109680 reflector.go:160] Listing and watching *core.Service from storage/cacher.go:/services/specs
I0613 01:30:38.653362  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.653431  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.653650  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.653723  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.653731  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.653755  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.653757  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.653798  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.653808  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.654144  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.654323  109680 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.654388  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.654398  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.654426  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.654474  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.654514  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.655185  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.655354  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.655417  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.656688  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.656789  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.657050  109680 store.go:1343] Monitoring replicationcontrollers count at <storage-prefix>//controllers
I0613 01:30:38.657136  109680 reflector.go:160] Listing and watching *core.ReplicationController from storage/cacher.go:/controllers
I0613 01:30:38.657846  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.658514  109680 storage_factory.go:285] storing bindings in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.658797  109680 storage_factory.go:285] storing componentstatuses in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.659789  109680 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.660472  109680 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.661157  109680 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.661960  109680 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.662412  109680 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.662670  109680 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.662996  109680 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.663480  109680 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.664185  109680 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.664504  109680 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.665313  109680 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.665673  109680 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.666235  109680 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.666533  109680 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.667282  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.667582  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.667794  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.667989  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.668227  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.668429  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.668660  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.669432  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.669813  109680 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.670649  109680 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.671640  109680 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.672046  109680 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.672387  109680 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.673142  109680 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.673501  109680 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.674323  109680 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.675179  109680 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.675874  109680 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.676730  109680 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.677078  109680 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.677269  109680 master.go:417] Skipping disabled API group "auditregistration.k8s.io".
I0613 01:30:38.677335  109680 master.go:425] Enabling API group "authentication.k8s.io".
I0613 01:30:38.677371  109680 master.go:425] Enabling API group "authorization.k8s.io".
I0613 01:30:38.677593  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.677757  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.677807  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.677908  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.678050  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.678815  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.679275  109680 store.go:1343] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0613 01:30:38.679456  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.679542  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.679562  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.679609  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.679677  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.679737  109680 reflector.go:160] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0613 01:30:38.680014  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.680378  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.680529  109680 store.go:1343] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0613 01:30:38.680677  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.680750  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.680766  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.681530  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.681635  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.681678  109680 reflector.go:160] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0613 01:30:38.681985  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.682074  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.682219  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.682495  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.683545  109680 store.go:1343] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0613 01:30:38.683596  109680 reflector.go:160] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0613 01:30:38.683328  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.683982  109680 master.go:425] Enabling API group "autoscaling".
I0613 01:30:38.684302  109680 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.684415  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.684425  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.684450  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.684619  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.684725  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.685321  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.685350  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.685484  109680 store.go:1343] Monitoring jobs.batch count at <storage-prefix>//jobs
I0613 01:30:38.685633  109680 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.685700  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.685709  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.685737  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.685781  109680 reflector.go:160] Listing and watching *batch.Job from storage/cacher.go:/jobs
I0613 01:30:38.686208  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.686454  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.686586  109680 store.go:1343] Monitoring cronjobs.batch count at <storage-prefix>//cronjobs
I0613 01:30:38.686603  109680 master.go:425] Enabling API group "batch".
I0613 01:30:38.686640  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.686680  109680 reflector.go:160] Listing and watching *batch.CronJob from storage/cacher.go:/cronjobs
I0613 01:30:38.686740  109680 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.686811  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.686823  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.686852  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.686920  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.687018  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.687367  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.687697  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.688153  109680 store.go:1343] Monitoring certificatesigningrequests.certificates.k8s.io count at <storage-prefix>//certificatesigningrequests
I0613 01:30:38.688180  109680 master.go:425] Enabling API group "certificates.k8s.io".
I0613 01:30:38.688250  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.688318  109680 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.688387  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.688397  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.688429  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.688499  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.688549  109680 reflector.go:160] Listing and watching *certificates.CertificateSigningRequest from storage/cacher.go:/certificatesigningrequests
I0613 01:30:38.688721  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.689233  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.689313  109680 reflector.go:160] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I0613 01:30:38.689237  109680 store.go:1343] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I0613 01:30:38.689454  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.689515  109680 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.689593  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.689609  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.689639  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.689873  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.690186  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.690252  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.690194  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.690554  109680 store.go:1343] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I0613 01:30:38.690610  109680 master.go:425] Enabling API group "coordination.k8s.io".
I0613 01:30:38.690942  109680 reflector.go:160] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I0613 01:30:38.691483  109680 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.691580  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.691633  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.691683  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.691752  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.691799  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.692364  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.692430  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.692646  109680 store.go:1343] Monitoring replicationcontrollers count at <storage-prefix>//controllers
I0613 01:30:38.692811  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.693105  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.693174  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.693215  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.692929  109680 reflector.go:160] Listing and watching *core.ReplicationController from storage/cacher.go:/controllers
I0613 01:30:38.693383  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.694048  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.694068  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.694146  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.694303  109680 store.go:1343] Monitoring daemonsets.apps count at <storage-prefix>//daemonsets
I0613 01:30:38.694454  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.694528  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.694547  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.694726  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.694645  109680 reflector.go:160] Listing and watching *apps.DaemonSet from storage/cacher.go:/daemonsets
I0613 01:30:38.694845  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.695525  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.695563  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.695789  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.695954  109680 store.go:1343] Monitoring deployments.apps count at <storage-prefix>//deployments
I0613 01:30:38.696043  109680 reflector.go:160] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I0613 01:30:38.696732  109680 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.696767  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.696906  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.696941  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.696986  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.697068  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.697887  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.698037  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.698102  109680 store.go:1343] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I0613 01:30:38.698135  109680 reflector.go:160] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I0613 01:30:38.698356  109680 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.698461  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.698944  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.699031  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.699109  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.699180  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.699579  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.699641  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.699944  109680 store.go:1343] Monitoring podsecuritypolicies.policy count at <storage-prefix>//podsecuritypolicy
I0613 01:30:38.700018  109680 reflector.go:160] Listing and watching *policy.PodSecurityPolicy from storage/cacher.go:/podsecuritypolicy
I0613 01:30:38.700243  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.700434  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.700501  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.700572  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.700645  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.700832  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.701246  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.701344  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.701374  109680 store.go:1343] Monitoring replicasets.apps count at <storage-prefix>//replicasets
I0613 01:30:38.701465  109680 reflector.go:160] Listing and watching *apps.ReplicaSet from storage/cacher.go:/replicasets
I0613 01:30:38.701537  109680 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.701603  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.701614  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.701649  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.701703  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.702181  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.702268  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.702301  109680 store.go:1343] Monitoring networkpolicies.networking.k8s.io count at <storage-prefix>//networkpolicies
I0613 01:30:38.702318  109680 master.go:425] Enabling API group "extensions".
I0613 01:30:38.702440  109680 reflector.go:160] Listing and watching *networking.NetworkPolicy from storage/cacher.go:/networkpolicies
I0613 01:30:38.702450  109680 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.702515  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.702525  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.702556  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.702638  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.702947  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.703098  109680 store.go:1343] Monitoring networkpolicies.networking.k8s.io count at <storage-prefix>//networkpolicies
I0613 01:30:38.703270  109680 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.703379  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.703422  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.703480  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.703563  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.703621  109680 reflector.go:160] Listing and watching *networking.NetworkPolicy from storage/cacher.go:/networkpolicies
I0613 01:30:38.703816  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.704186  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.704604  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.705049  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.705138  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.705235  109680 store.go:1343] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I0613 01:30:38.705390  109680 reflector.go:160] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I0613 01:30:38.705541  109680 master.go:425] Enabling API group "networking.k8s.io".
I0613 01:30:38.705603  109680 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.705708  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.705744  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.705789  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.705874  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.706193  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.706354  109680 store.go:1343] Monitoring runtimeclasses.node.k8s.io count at <storage-prefix>//runtimeclasses
I0613 01:30:38.706428  109680 master.go:425] Enabling API group "node.k8s.io".
I0613 01:30:38.706461  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.706540  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.706620  109680 reflector.go:160] Listing and watching *node.RuntimeClass from storage/cacher.go:/runtimeclasses
I0613 01:30:38.706199  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.706652  109680 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.706791  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.706800  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.706826  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.706884  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.707232  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.707312  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.707453  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.708087  109680 store.go:1343] Monitoring poddisruptionbudgets.policy count at <storage-prefix>//poddisruptionbudgets
I0613 01:30:38.708204  109680 reflector.go:160] Listing and watching *policy.PodDisruptionBudget from storage/cacher.go:/poddisruptionbudgets
I0613 01:30:38.708303  109680 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.708477  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.708491  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.708548  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.708616  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.709155  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.709187  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.709267  109680 store.go:1343] Monitoring podsecuritypolicies.policy count at <storage-prefix>//podsecuritypolicy
I0613 01:30:38.709284  109680 master.go:425] Enabling API group "policy".
I0613 01:30:38.709312  109680 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.709394  109680 reflector.go:160] Listing and watching *policy.PodSecurityPolicy from storage/cacher.go:/podsecuritypolicy
I0613 01:30:38.709419  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.710516  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.710560  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.710574  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.710614  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.710663  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.710921  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.711030  109680 store.go:1343] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I0613 01:30:38.711148  109680 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.711207  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.711216  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.711246  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.711308  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.711338  109680 reflector.go:160] Listing and watching *rbac.Role from storage/cacher.go:/roles
I0613 01:30:38.711471  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.712002  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.712055  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.712103  109680 store.go:1343] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I0613 01:30:38.712138  109680 reflector.go:160] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I0613 01:30:38.712131  109680 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.712258  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.712268  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.712304  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.712340  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.712644  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.713238  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.713410  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.713537  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.713572  109680 store.go:1343] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I0613 01:30:38.713748  109680 reflector.go:160] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I0613 01:30:38.714042  109680 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.714113  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.714125  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.714153  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.714210  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.714608  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.714766  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.714624  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.716075  109680 store.go:1343] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I0613 01:30:38.716122  109680 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.716183  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.716192  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.716222  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.716273  109680 reflector.go:160] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I0613 01:30:38.716515  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.717519  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.717605  109680 store.go:1343] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I0613 01:30:38.717734  109680 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.717806  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.717816  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.717844  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.717909  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.717941  109680 reflector.go:160] Listing and watching *rbac.Role from storage/cacher.go:/roles
I0613 01:30:38.718158  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.718392  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.718481  109680 store.go:1343] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I0613 01:30:38.718508  109680 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.718561  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.718570  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.718599  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.718642  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.718681  109680 reflector.go:160] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I0613 01:30:38.718947  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.719232  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.719299  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.719347  109680 store.go:1343] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I0613 01:30:38.719401  109680 reflector.go:160] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I0613 01:30:38.719722  109680 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.720112  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.720132  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.720164  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.720204  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.720447  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.720530  109680 store.go:1343] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I0613 01:30:38.720556  109680 master.go:425] Enabling API group "rbac.authorization.k8s.io".
I0613 01:30:38.721462  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.721519  109680 reflector.go:160] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I0613 01:30:38.722560  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.722562  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.722795  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.722681  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.724286  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.725105  109680 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.725223  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.725265  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.725319  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.725399  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.725957  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.726340  109680 store.go:1343] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I0613 01:30:38.726521  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.726579  109680 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.726705  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.726747  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.727574  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.726624  109680 reflector.go:160] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I0613 01:30:38.727843  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.728635  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.728985  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.729249  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.729492  109680 store.go:1343] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I0613 01:30:38.729537  109680 master.go:425] Enabling API group "scheduling.k8s.io".
I0613 01:30:38.729677  109680 master.go:417] Skipping disabled API group "settings.k8s.io".
I0613 01:30:38.729897  109680 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.729989  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.730001  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.730052  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.730107  109680 reflector.go:160] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I0613 01:30:38.730398  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.730736  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.730933  109680 store.go:1343] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I0613 01:30:38.731093  109680 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.731161  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.731172  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.731209  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.731332  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.731370  109680 reflector.go:160] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I0613 01:30:38.731587  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.732516  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.732619  109680 store.go:1343] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I0613 01:30:38.732649  109680 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.732739  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.732752  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.732784  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.732944  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.732986  109680 reflector.go:160] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I0613 01:30:38.733209  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.733602  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.734046  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.734381  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.734507  109680 store.go:1343] Monitoring csinodes.storage.k8s.io count at <storage-prefix>//csinodes
I0613 01:30:38.734537  109680 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.734602  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.734612  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.734648  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.734772  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.734877  109680 reflector.go:160] Listing and watching *storage.CSINode from storage/cacher.go:/csinodes
I0613 01:30:38.735105  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.736632  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.736782  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.737588  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.737660  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.737740  109680 store.go:1343] Monitoring csidrivers.storage.k8s.io count at <storage-prefix>//csidrivers
I0613 01:30:38.737807  109680 reflector.go:160] Listing and watching *storage.CSIDriver from storage/cacher.go:/csidrivers
I0613 01:30:38.737988  109680 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.738100  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.738115  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.738164  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.738224  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.738489  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.738573  109680 store.go:1343] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I0613 01:30:38.738727  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.738740  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.738714  109680 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.738822  109680 reflector.go:160] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I0613 01:30:38.738828  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.738838  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.738886  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.739035  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.739366  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.739474  109680 store.go:1343] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I0613 01:30:38.739493  109680 master.go:425] Enabling API group "storage.k8s.io".
I0613 01:30:38.739629  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.739690  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.739700  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.739741  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.739782  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.739814  109680 reflector.go:160] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I0613 01:30:38.739936  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.740154  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.740185  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.740339  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.740547  109680 store.go:1343] Monitoring deployments.apps count at <storage-prefix>//deployments
I0613 01:30:38.740685  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.740748  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.740758  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.740789  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.740875  109680 reflector.go:160] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I0613 01:30:38.741018  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.741676  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.741801  109680 store.go:1343] Monitoring statefulsets.apps count at <storage-prefix>//statefulsets
I0613 01:30:38.741956  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.742040  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.742047  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.742057  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.742087  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.742112  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.742152  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.742315  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.742545  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.742652  109680 store.go:1343] Monitoring controllerrevisions.apps count at <storage-prefix>//controllerrevisions
I0613 01:30:38.742780  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.742838  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.742848  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.742882  109680 reflector.go:160] Listing and watching *apps.StatefulSet from storage/cacher.go:/statefulsets
I0613 01:30:38.742909  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.742963  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.742998  109680 reflector.go:160] Listing and watching *apps.ControllerRevision from storage/cacher.go:/controllerrevisions
I0613 01:30:38.743148  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.743428  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.743470  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.743537  109680 store.go:1343] Monitoring deployments.apps count at <storage-prefix>//deployments
I0613 01:30:38.743658  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.743711  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.743721  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.743744  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.743790  109680 reflector.go:160] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I0613 01:30:38.744002  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.744081  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.744309  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.744429  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.744439  109680 store.go:1343] Monitoring statefulsets.apps count at <storage-prefix>//statefulsets
I0613 01:30:38.744489  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.744542  109680 reflector.go:160] Listing and watching *apps.StatefulSet from storage/cacher.go:/statefulsets
I0613 01:30:38.744574  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.744633  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.744644  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.744674  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.744732  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.745052  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.745166  109680 store.go:1343] Monitoring daemonsets.apps count at <storage-prefix>//daemonsets
I0613 01:30:38.745293  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.745355  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.745364  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.745396  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.745433  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.745462  109680 reflector.go:160] Listing and watching *apps.DaemonSet from storage/cacher.go:/daemonsets
I0613 01:30:38.745648  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.745952  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.746056  109680 store.go:1343] Monitoring replicasets.apps count at <storage-prefix>//replicasets
I0613 01:30:38.746184  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.746240  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.746249  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.746275  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.746352  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.746371  109680 reflector.go:160] Listing and watching *apps.ReplicaSet from storage/cacher.go:/replicasets
I0613 01:30:38.746580  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.746626  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.746774  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.746850  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.746905  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.746968  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.747004  109680 store.go:1343] Monitoring controllerrevisions.apps count at <storage-prefix>//controllerrevisions
I0613 01:30:38.747024  109680 reflector.go:160] Listing and watching *apps.ControllerRevision from storage/cacher.go:/controllerrevisions
I0613 01:30:38.747129  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.747197  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.747211  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.747247  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.747309  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.748206  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.748598  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.748824  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.749054  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.749241  109680 store.go:1343] Monitoring deployments.apps count at <storage-prefix>//deployments
I0613 01:30:38.749280  109680 reflector.go:160] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I0613 01:30:38.749379  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.749444  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.749458  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.749493  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.749537  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.749818  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.749946  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.749950  109680 store.go:1343] Monitoring statefulsets.apps count at <storage-prefix>//statefulsets
I0613 01:30:38.749973  109680 reflector.go:160] Listing and watching *apps.StatefulSet from storage/cacher.go:/statefulsets
I0613 01:30:38.750136  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.750203  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.750212  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.750239  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.750295  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.750574  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.750692  109680 store.go:1343] Monitoring daemonsets.apps count at <storage-prefix>//daemonsets
I0613 01:30:38.750826  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.750916  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.750928  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.750958  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.751008  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.751040  109680 reflector.go:160] Listing and watching *apps.DaemonSet from storage/cacher.go:/daemonsets
I0613 01:30:38.751219  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.751554  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.751530  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.751724  109680 store.go:1343] Monitoring replicasets.apps count at <storage-prefix>//replicasets
I0613 01:30:38.751971  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.752073  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.752083  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.752116  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.752155  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.752188  109680 reflector.go:160] Listing and watching *apps.ReplicaSet from storage/cacher.go:/replicasets
I0613 01:30:38.752396  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.752746  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.752824  109680 store.go:1343] Monitoring controllerrevisions.apps count at <storage-prefix>//controllerrevisions
I0613 01:30:38.752839  109680 master.go:425] Enabling API group "apps".
I0613 01:30:38.752893  109680 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.752952  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.752962  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.752991  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.753031  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.753064  109680 reflector.go:160] Listing and watching *apps.ControllerRevision from storage/cacher.go:/controllerrevisions
I0613 01:30:38.753285  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.753651  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.754062  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.754169  109680 store.go:1343] Monitoring validatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//validatingwebhookconfigurations
I0613 01:30:38.754195  109680 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.754266  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.754277  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.754306  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.754369  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.754421  109680 reflector.go:160] Listing and watching *admissionregistration.ValidatingWebhookConfiguration from storage/cacher.go:/validatingwebhookconfigurations
I0613 01:30:38.754506  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.754677  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.754840  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.755154  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.755236  109680 store.go:1343] Monitoring mutatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//mutatingwebhookconfigurations
I0613 01:30:38.755250  109680 master.go:425] Enabling API group "admissionregistration.k8s.io".
I0613 01:30:38.755291  109680 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.755471  109680 client.go:354] parsed scheme: ""
I0613 01:30:38.755483  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:38.755515  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:38.755557  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.755587  109680 reflector.go:160] Listing and watching *admissionregistration.MutatingWebhookConfiguration from storage/cacher.go:/mutatingwebhookconfigurations
I0613 01:30:38.755783  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.756049  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:38.756082  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.756143  109680 store.go:1343] Monitoring events count at <storage-prefix>//events
I0613 01:30:38.756157  109680 master.go:425] Enabling API group "events.k8s.io".
I0613 01:30:38.756359  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:38.756404  109680 reflector.go:160] Listing and watching *core.Event from storage/cacher.go:/events
I0613 01:30:38.757457  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.757691  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.758705  109680 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.759161  109680 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.759495  109680 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.759609  109680 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.759711  109680 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.759805  109680 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.760787  109680 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.761015  109680 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.761119  109680 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.761209  109680 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.762446  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.763008  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.764174  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.764525  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.765506  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.765837  109680 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.767078  109680 watch_cache.go:405] Replace watchCache (rev: 24441) 
I0613 01:30:38.769398  109680 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.769718  109680 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.770592  109680 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.771217  109680 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W0613 01:30:38.771335  109680 genericapiserver.go:351] Skipping API batch/v2alpha1 because it has no resources.
I0613 01:30:38.772048  109680 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.772236  109680 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.772502  109680 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.773400  109680 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.774208  109680 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.775359  109680 storage_factory.go:285] storing daemonsets.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.775738  109680 storage_factory.go:285] storing daemonsets.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.776559  109680 storage_factory.go:285] storing deployments.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.776794  109680 storage_factory.go:285] storing deployments.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.777134  109680 storage_factory.go:285] storing deployments.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.777572  109680 storage_factory.go:285] storing deployments.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.779004  109680 storage_factory.go:285] storing ingresses.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.779370  109680 storage_factory.go:285] storing ingresses.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.780220  109680 storage_factory.go:285] storing networkpolicies.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.780972  109680 storage_factory.go:285] storing podsecuritypolicies.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.781771  109680 storage_factory.go:285] storing replicasets.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.782088  109680 storage_factory.go:285] storing replicasets.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.782420  109680 storage_factory.go:285] storing replicasets.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.782521  109680 storage_factory.go:285] storing replicationcontrollers.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.783305  109680 storage_factory.go:285] storing replicationcontrollers.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.791066  109680 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.791875  109680 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.792230  109680 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.792943  109680 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W0613 01:30:38.793121  109680 genericapiserver.go:351] Skipping API node.k8s.io/v1alpha1 because it has no resources.
I0613 01:30:38.793925  109680 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.794250  109680 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.794833  109680 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.795655  109680 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.796198  109680 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.797237  109680 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.797935  109680 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.798514  109680 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.799101  109680 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.800036  109680 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.801064  109680 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W0613 01:30:38.801212  109680 genericapiserver.go:351] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
I0613 01:30:38.801891  109680 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.802497  109680 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W0613 01:30:38.802595  109680 genericapiserver.go:351] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
I0613 01:30:38.803317  109680 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.804029  109680 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.804331  109680 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.805061  109680 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.805603  109680 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.806354  109680 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.806947  109680 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W0613 01:30:38.807046  109680 genericapiserver.go:351] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
I0613 01:30:38.807834  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.808527  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.809013  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.809706  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.810016  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.810304  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.810999  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.811295  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.811607  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.812335  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.812671  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.812981  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.814892  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.815633  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.815965  109680 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.816662  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.816993  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.817318  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.818027  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.818328  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.818635  109680 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.819432  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.819758  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.820177  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.821022  109680 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.821728  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.821935  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.822319  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.822612  109680 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.823510  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.823834  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.824173  109680 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.825021  109680 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.825659  109680 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.826476  109680 storage_factory.go:285] storing events.events.k8s.io in events.k8s.io/v1beta1, reading as events.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"a118c7f5-1b4f-420e-a362-3b5804dfb13f", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:""}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I0613 01:30:38.828793  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:38.828817  109680 healthz.go:161] healthz check poststarthook/bootstrap-controller failed: not finished
I0613 01:30:38.828828  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:38.828839  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:38.828847  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:38.828884  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[-]poststarthook/bootstrap-controller failed: reason withheld
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:38.829018  109680 wrap.go:47] GET /healthz: (309.477µs) 500
goroutine 26826 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c410bd0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c410bd0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00311fa20, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00e327a40, 0xc00db09520, 0x18a, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00e327a40, 0xc00d2d8000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00e327a40, 0xc00e8f7f00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00e327a40, 0xc00e8f7f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010e6e8a0, 0xc00c8aea40, 0x7423da0, 0xc00e327a40, 0xc00e8f7f00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[-]poststarthook/bootstrap-controller failed: reason withheld\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56506]
I0613 01:30:38.829691  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (1.309178ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.831985  109680 wrap.go:47] GET /api/v1/services: (977.742µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.842338  109680 wrap.go:47] GET /api/v1/services: (1.351099ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.844956  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:38.844981  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:38.844995  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:38.845003  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:38.845010  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:38.845159  109680 wrap.go:47] GET /healthz: (313.328µs) 500
goroutine 26854 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c411650, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c411650, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037414a0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00e327b50, 0xc002cdcc00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00e327b50, 0xc00d2d9300)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00e327b50, 0xc00d2d9300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010e6f620, 0xc00c8aea40, 0x7423da0, 0xc00e327b50, 0xc00d2d9300)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:38.847091  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.840462ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.849873  109680 wrap.go:47] POST /api/v1/namespaces: (2.061089ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.850067  109680 wrap.go:47] GET /api/v1/services: (2.839497ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:38.851714  109680 wrap.go:47] GET /api/v1/namespaces/kube-public: (1.201729ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.857370  109680 wrap.go:47] POST /api/v1/namespaces: (5.211349ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.858828  109680 wrap.go:47] GET /api/v1/namespaces/kube-node-lease: (1.18843ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.860717  109680 wrap.go:47] GET /api/v1/services: (942.461µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:38.863491  109680 wrap.go:47] POST /api/v1/namespaces: (2.349195ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:38.930023  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:38.930065  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:38.930077  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:38.930086  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:38.930095  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:38.930260  109680 wrap.go:47] GET /healthz: (384.713µs) 500
goroutine 26648 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c3ee3f0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c3ee3f0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00309ec60, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00d9915c8, 0xc000c8c600, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2400)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00d9915c8, 0xc00d4a2400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0102033e0, 0xc00c8aea40, 0x7423da0, 0xc00d9915c8, 0xc00d4a2400)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:38.946335  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:38.946374  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:38.946385  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:38.946394  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:38.946402  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:38.946571  109680 wrap.go:47] GET /healthz: (386.346µs) 500
goroutine 26079 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c456c40, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c456c40, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0083c5ec0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc005ba7e20, 0xc006426780, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2ea00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc005ba7e20, 0xc00ac2e900)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc005ba7e20, 0xc00ac2e900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00f1ed3e0, 0xc00c8aea40, 0x7423da0, 0xc005ba7e20, 0xc00ac2e900)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.031359  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.031397  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.031409  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.031418  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.031425  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.031617  109680 wrap.go:47] GET /healthz: (401.655µs) 500
goroutine 26868 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fe5b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fe5b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037d4d40, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0032455a0, 0xc010ecef00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0032455a0, 0xc00d3eb200)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0032455a0, 0xc00d3eb200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0115d6840, 0xc00c8aea40, 0x7423da0, 0xc0032455a0, 0xc00d3eb200)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.046325  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.046364  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.046376  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.046384  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.046392  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.046548  109680 wrap.go:47] GET /healthz: (369.072µs) 500
goroutine 26081 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c456d90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c456d90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037e85e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc005ba7e48, 0xc006426f00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2f000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc005ba7e48, 0xc00ac2ef00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc005ba7e48, 0xc00ac2ef00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00f1ed5c0, 0xc00c8aea40, 0x7423da0, 0xc005ba7e48, 0xc00ac2ef00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.129957  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.129996  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.130008  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.130017  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.130026  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.130176  109680 wrap.go:47] GET /healthz: (364.745µs) 500
goroutine 26870 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fe700, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fe700, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037d5180, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0032455c8, 0xc010ecf680, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0032455c8, 0xc00d3eb900)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0032455c8, 0xc00d3eb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0115d6a20, 0xc00c8aea40, 0x7423da0, 0xc0032455c8, 0xc00d3eb900)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.146321  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.146356  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.146368  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.146377  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.146385  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.146562  109680 wrap.go:47] GET /healthz: (383.499µs) 500
goroutine 26883 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c456ee0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c456ee0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037e8bc0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc005ba7e88, 0xc006427680, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc005ba7e88, 0xc00ac2f500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00f1ed8c0, 0xc00c8aea40, 0x7423da0, 0xc005ba7e88, 0xc00ac2f500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.229944  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.229983  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.229996  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.230004  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.230013  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.230149  109680 wrap.go:47] GET /healthz: (368.926µs) 500
goroutine 26650 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c3ee4d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c3ee4d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00309efa0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00d991610, 0xc000c8d200, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00d991610, 0xc00d4a2d00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00d991610, 0xc00d4a2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010203680, 0xc00c8aea40, 0x7423da0, 0xc00d991610, 0xc00d4a2d00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.246465  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.246518  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.246530  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.246539  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.246548  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.246690  109680 wrap.go:47] GET /healthz: (466.721µs) 500
goroutine 26872 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fe850, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fe850, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037d5460, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0032455f0, 0xc010ecfe00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0032455f0, 0xc00d4ea000)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0032455f0, 0xc00d4ea000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0115d6c00, 0xc00c8aea40, 0x7423da0, 0xc0032455f0, 0xc00d4ea000)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.329930  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.329966  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.329979  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.329987  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.330006  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.330145  109680 wrap.go:47] GET /healthz: (374.067µs) 500
goroutine 26463 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c447030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c447030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00376b9c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00c403530, 0xc00d50a300, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00c403530, 0xc00d394c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00c403530, 0xc00d394b00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00c403530, 0xc00d394b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0116f8960, 0xc00c8aea40, 0x7423da0, 0xc00c403530, 0xc00d394b00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.346250  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.346325  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.346337  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.346346  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.346354  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.346515  109680 wrap.go:47] GET /healthz: (385.44µs) 500
goroutine 26465 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c447180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c447180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00376bbe0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00c403538, 0xc00d50aa80, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00c403538, 0xc00d395000)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00c403538, 0xc00d395000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00c403538, 0xc00d395000)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00c403538, 0xc00d395000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00c403538, 0xc00d395000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00c403538, 0xc00d394f00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00c403538, 0xc00d394f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0116f8b40, 0xc00c8aea40, 0x7423da0, 0xc00c403538, 0xc00d394f00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.429981  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.430021  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.430033  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.430042  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.430049  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.430197  109680 wrap.go:47] GET /healthz: (371.211µs) 500
goroutine 26874 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fe9a0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fe9a0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037d5b40, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc003245618, 0xc00d4ee780, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc003245618, 0xc00d4ea800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc003245618, 0xc00d4ea700)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc003245618, 0xc00d4ea700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d5041e0, 0xc00c8aea40, 0x7423da0, 0xc003245618, 0xc00d4ea700)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.446373  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.446413  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.446423  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.446430  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.446435  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.446586  109680 wrap.go:47] GET /healthz: (387.226µs) 500
goroutine 26876 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011feaf0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011feaf0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037d5d20, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc003245640, 0xc00d4eed80, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc003245640, 0xc00d4eaf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc003245640, 0xc00d4eae00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc003245640, 0xc00d4eae00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d504360, 0xc00c8aea40, 0x7423da0, 0xc003245640, 0xc00d4eae00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.529970  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.530008  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.530019  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.530028  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.530038  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.530196  109680 wrap.go:47] GET /healthz: (379.208µs) 500
goroutine 26878 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fec40, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fec40, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037f6060, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc003245648, 0xc00d4ef500, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc003245648, 0xc00d4eb300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc003245648, 0xc00d4eb200)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc003245648, 0xc00d4eb200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d504480, 0xc00c8aea40, 0x7423da0, 0xc003245648, 0xc00d4eb200)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.546208  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.546242  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.546254  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.546262  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.546268  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.546388  109680 wrap.go:47] GET /healthz: (277.823µs) 500
goroutine 26880 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fed90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fed90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037f6160, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc003245670, 0xc00d4efb00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc003245670, 0xc00d4eba00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc003245670, 0xc00d4eb900)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc003245670, 0xc00d4eb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d504600, 0xc00c8aea40, 0x7423da0, 0xc003245670, 0xc00d4eb900)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.630016  109680 healthz.go:161] healthz check etcd failed: etcd client connection not yet established
I0613 01:30:39.630058  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.630070  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.630093  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.630101  109680 healthz.go:175] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.630250  109680 wrap.go:47] GET /healthz: (400.285µs) 500
goroutine 26885 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c457030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c457030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037e92e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc005ba7ec8, 0xc006427e00, 0x175, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fc00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fb00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc005ba7ec8, 0xc00ac2fb00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d55c060, 0xc00c8aea40, 0x7423da0, 0xc005ba7ec8, 0xc00ac2fb00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[-]etcd failed: reason withheld\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.630748  109680 client.go:354] parsed scheme: ""
I0613 01:30:39.630782  109680 client.go:354] scheme "" not registered, fallback to default scheme
I0613 01:30:39.630830  109680 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
I0613 01:30:39.630917  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:39.631452  109680 balancer_conn_wrappers.go:131] clientv3/balancer: pin "127.0.0.1:2379"
I0613 01:30:39.631549  109680 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I0613 01:30:39.647650  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.647677  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.647688  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.647698  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.647888  109680 wrap.go:47] GET /healthz: (1.699251ms) 500
goroutine 26656 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c3ee620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c3ee620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00309f520, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00d991690, 0xc00dbc02c0, 0x160, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00d991690, 0xc00d4a3600)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00d991690, 0xc00d4a3600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010203ce0, 0xc00c8aea40, 0x7423da0, 0xc00d991690, 0xc00d4a3600)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.731146  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.731181  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.731191  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.731199  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.731364  109680 wrap.go:47] GET /healthz: (1.553995ms) 500
goroutine 26906 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c4473b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c4473b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc003828440, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00c403598, 0xc00cbbd080, 0x160, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00c403598, 0xc00d395900)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00c403598, 0xc00d395900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00c403598, 0xc00d395900)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00c403598, 0xc00d395900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00c403598, 0xc00d395900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00c403598, 0xc00d395800)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00c403598, 0xc00d395800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0116f9b00, 0xc00c8aea40, 0x7423da0, 0xc00c403598, 0xc00d395800)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.747335  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.747369  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.747380  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.747387  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.747561  109680 wrap.go:47] GET /healthz: (1.443998ms) 500
goroutine 26914 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011fefc0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011fefc0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037f6700, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0032456c8, 0xc0060d3ce0, 0x160, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0032456c8, 0xc00d5fa300)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0032456c8, 0xc00d5fa300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d5048a0, 0xc00c8aea40, 0x7423da0, 0xc0032456c8, 0xc00d5fa300)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.832738  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (2.667356ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:39.833914  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.833938  109680 healthz.go:161] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0613 01:30:39.833948  109680 healthz.go:161] healthz check poststarthook/ca-registration failed: not finished
I0613 01:30:39.833955  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I0613 01:30:39.834091  109680 wrap.go:47] GET /healthz: (3.474056ms) 500
goroutine 26889 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c4573b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c4573b0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc003838760, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc005ba7f78, 0xc0062d4c60, 0x160, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc005ba7f78, 0xc00d5aac00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc005ba7f78, 0xc00d5aac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d55c720, 0xc00c8aea40, 0x7423da0, 0xc005ba7f78, 0xc00d5aac00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.835989  109680 wrap.go:47] GET /api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication: (1.43373ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:39.836198  109680 wrap.go:47] GET /apis/scheduling.k8s.io/v1beta1/priorityclasses/system-node-critical: (2.996264ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.836388  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.83844ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56540]
I0613 01:30:39.842025  109680 wrap.go:47] POST /api/v1/namespaces/kube-system/configmaps: (4.963081ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56506]
I0613 01:30:39.842291  109680 wrap.go:47] POST /apis/scheduling.k8s.io/v1beta1/priorityclasses: (4.732248ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.842416  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.499943ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56540]
I0613 01:30:39.842491  109680 storage_scheduling.go:119] created PriorityClass system-node-critical with value 2000001000
I0613 01:30:39.844081  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-admin: (1.228958ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56540]
I0613 01:30:39.844112  109680 wrap.go:47] GET /apis/scheduling.k8s.io/v1beta1/priorityclasses/system-cluster-critical: (1.485114ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.845712  109680 wrap.go:47] POST /apis/scheduling.k8s.io/v1beta1/priorityclasses: (1.302269ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.845928  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/admin: (1.476839ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.845949  109680 storage_scheduling.go:119] created PriorityClass system-cluster-critical with value 2000000000
I0613 01:30:39.845967  109680 storage_scheduling.go:128] all system priority classes are created successfully or already exist.
I0613 01:30:39.847035  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-edit: (828.054µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.848198  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/edit: (882.674µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.849234  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-view: (786.863µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.850730  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/view: (1.184789ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.851774  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.851804  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:39.851964  109680 wrap.go:47] GET /healthz: (5.742576ms) 500
goroutine 26916 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011ff0a0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011ff0a0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0037f7480, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc003245710, 0xc0044e1a40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc003245710, 0xc00d5fac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc003245710, 0xc00d5fab00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc003245710, 0xc00d5fab00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00d504cc0, 0xc00c8aea40, 0x7423da0, 0xc003245710, 0xc00d5fab00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.852175  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:discovery: (1.1736ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.853201  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/cluster-admin: (707.526µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.855282  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.759138ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.855469  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/cluster-admin
I0613 01:30:39.856476  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:discovery: (887.37µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.858240  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.491056ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.858416  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:discovery
I0613 01:30:39.859403  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:basic-user: (873.59µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.861363  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.413273ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.861530  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:basic-user
I0613 01:30:39.864512  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:public-info-viewer: (2.861463ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.866405  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.592288ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.866600  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:public-info-viewer
I0613 01:30:39.867568  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/admin: (838.353µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.869936  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.09395ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.870094  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/admin
I0613 01:30:39.871342  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/edit: (1.127341ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.873055  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.366381ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.873584  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/edit
I0613 01:30:39.874590  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/view: (773.056µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.876243  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.335343ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.876417  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/view
I0613 01:30:39.877334  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-admin: (798.728µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.878951  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.34624ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.879127  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-admin
I0613 01:30:39.881558  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-edit: (2.313173ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.887837  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (5.978103ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.888123  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-edit
I0613 01:30:39.889321  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-view: (1.03926ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.891311  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.6697ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.891533  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-view
I0613 01:30:39.892556  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:heapster: (865.132µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.894209  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.384231ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.894506  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:heapster
I0613 01:30:39.896032  109680 cacher.go:740] cacher (*rbac.ClusterRole): 1 objects queued in incoming channel.
I0613 01:30:39.896498  109680 cacher.go:740] cacher (*rbac.ClusterRole): 2 objects queued in incoming channel.
I0613 01:30:39.896340  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node: (1.622616ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.903596  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (6.521577ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.905046  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:node
I0613 01:30:39.906162  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-problem-detector: (850.121µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.908200  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.505109ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.908468  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:node-problem-detector
I0613 01:30:39.909626  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-proxier: (903.088µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.911683  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.558305ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.912250  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:node-proxier
I0613 01:30:39.913192  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kubelet-api-admin: (732.285µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.915317  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.627082ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.915801  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:kubelet-api-admin
I0613 01:30:39.916937  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-bootstrapper: (907.462µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.919245  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.027665ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.919395  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:node-bootstrapper
I0613 01:30:39.920272  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:auth-delegator: (750.102µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.922139  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.543863ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.922321  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:auth-delegator
I0613 01:30:39.923276  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-aggregator: (834.136µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.924948  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.399911ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.925113  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:kube-aggregator
I0613 01:30:39.926133  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-controller-manager: (904.501µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.928161  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.6302ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.928353  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:kube-controller-manager
I0613 01:30:39.929377  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-scheduler: (899.305µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.931063  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.931148  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:39.931469  109680 wrap.go:47] GET /healthz: (1.096619ms) 500
goroutine 26989 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00c401960, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00c401960, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc003f61640, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0071db2e0, 0xc003cf8280, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0071db2e0, 0xc00dddd400)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0071db2e0, 0xc00dddd400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc011427a40, 0xc00c8aea40, 0x7423da0, 0xc0071db2e0, 0xc00dddd400)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:39.931935  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.183121ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.932358  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:kube-scheduler
I0613 01:30:39.933372  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-dns: (734.192µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.935035  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.381999ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.935225  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:kube-dns
I0613 01:30:39.936241  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:persistent-volume-provisioner: (721.605µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.938391  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.210871ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.938628  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:persistent-volume-provisioner
I0613 01:30:39.939720  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:csi-external-attacher: (875.22µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.941471  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.384873ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.941731  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:csi-external-attacher
I0613 01:30:39.943055  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:nodeclient: (1.065722ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.945275  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.640156ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.945517  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:nodeclient
I0613 01:30:39.947397  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:39.947450  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:39.947734  109680 wrap.go:47] GET /healthz: (1.518435ms) 500
goroutine 27111 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011832d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011832d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004075e00, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2a570, 0xc0039da640, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2a570, 0xc00e86f500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2a570, 0xc00e86f500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00dc718c0, 0xc00c8aea40, 0x7423da0, 0xc006b2a570, 0xc00e86f500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:39.948094  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient: (2.37125ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.950941  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.216324ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.951142  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient
I0613 01:30:39.952394  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:volume-scheduler: (842.416µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.954214  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.53017ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.954383  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:volume-scheduler
I0613 01:30:39.955339  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:csi-external-provisioner: (837.03µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.957258  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.614733ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.957525  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:csi-external-provisioner
I0613 01:30:39.958454  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:attachdetach-controller: (740.092µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.960386  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.527509ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.960606  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0613 01:30:39.961722  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:clusterrole-aggregation-controller: (979.741µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.963466  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.441171ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.963620  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0613 01:30:39.964468  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:cronjob-controller: (727.828µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.966193  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.445909ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.966373  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0613 01:30:39.967346  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:daemon-set-controller: (693.885µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.968987  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.290036ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.969220  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0613 01:30:39.970316  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:deployment-controller: (721.3µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.971872  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.149099ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.972116  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:deployment-controller
I0613 01:30:39.973240  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:disruption-controller: (903.535µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.975171  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.59353ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.975420  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:disruption-controller
I0613 01:30:39.976599  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:endpoint-controller: (882.973µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.979929  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.913679ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.980164  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0613 01:30:39.985206  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:expand-controller: (4.921689ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.987496  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.751006ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.987846  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:expand-controller
I0613 01:30:39.988754  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:generic-garbage-collector: (700.971µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.990459  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.357523ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.990825  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0613 01:30:39.991633  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:horizontal-pod-autoscaler: (621.552µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.993143  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.098746ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.993394  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0613 01:30:39.994387  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:job-controller: (743.286µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.996177  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.498345ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.996409  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:job-controller
I0613 01:30:39.997568  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:namespace-controller: (873.115µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.999683  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.526827ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:39.999914  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:namespace-controller
I0613 01:30:40.000816  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:node-controller: (701.862µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.002507  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.161003ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.002846  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:node-controller
I0613 01:30:40.004058  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:persistent-volume-binder: (775.697µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.006557  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.109676ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.006832  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0613 01:30:40.008274  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pod-garbage-collector: (1.215608ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.011834  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.469494ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.012029  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0613 01:30:40.012950  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:replicaset-controller: (722.885µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.014823  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.485095ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.015016  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0613 01:30:40.016060  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:replication-controller: (870.818µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.017869  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.373465ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.018143  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:replication-controller
I0613 01:30:40.019317  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:resourcequota-controller: (954.738µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.020959  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.321839ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.021236  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0613 01:30:40.022257  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:route-controller: (811.219µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.023888  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.273165ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.024056  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:route-controller
I0613 01:30:40.024913  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:service-account-controller: (739.383µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.026629  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.255082ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.026788  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:service-account-controller
I0613 01:30:40.027721  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:service-controller: (761.081µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.029293  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.212653ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.029578  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:service-controller
I0613 01:30:40.030459  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.030482  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.030624  109680 wrap.go:47] GET /healthz: (949.712µs) 500
goroutine 27208 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc001119180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc001119180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004567ba0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e0be8, 0xc007b912c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e0be8, 0xc0101eb500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e0be8, 0xc0101eb500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0101e1680, 0xc00c8aea40, 0x7423da0, 0xc0028e0be8, 0xc0101eb500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
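(The 500 above comes from the poststarthook/rbac/bootstrap-roles check: /healthz keeps failing until the bootstrap roles and bindings being reconciled in this log have all been created, and callers simply poll the endpoint until it flips to 200. A minimal sketch of such a readiness poll is shown below; the base URL, timeout, and helper name are illustrative assumptions, not part of the test code.

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// waitForHealthz polls /healthz until it returns 200 OK or the timeout
// expires. On a non-200 response it prints the check summary returned in
// the body, which is the same text quoted in the "logging error output"
// lines of this log.
func waitForHealthz(base string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		resp, err := http.Get(base + "/healthz")
		if err == nil {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				return nil
			}
			fmt.Printf("healthz not ready yet:\n%s\n", body)
		}
		time.Sleep(100 * time.Millisecond)
	}
	return fmt.Errorf("apiserver did not become healthy within %v", timeout)
}

func main() {
	// 127.0.0.1:8080 is a placeholder; the integration test harness uses
	// its own loopback address and client.
	if err := waitForHealthz("http://127.0.0.1:8080", 30*time.Second); err != nil {
		fmt.Println(err)
	}
}
)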
I0613 01:30:40.031873  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:statefulset-controller: (1.883778ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.034013  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.832912ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.034210  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0613 01:30:40.035293  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:ttl-controller: (945.565µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.037108  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.413728ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.037451  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:ttl-controller
I0613 01:30:40.038399  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:certificate-controller: (730.515µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.067265  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.067295  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.067432  109680 wrap.go:47] GET /healthz: (12.168938ms) 500
goroutine 27180 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc001135180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc001135180, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0045c7480, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0071db7d8, 0xc0031f1e00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0071db7d8, 0xc010688200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0071db7d8, 0xc010688100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0071db7d8, 0xc010688100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc01059a9c0, 0xc00c8aea40, 0x7423da0, 0xc0071db7d8, 0xc010688100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.067123  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (11.510489ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.068266  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:certificate-controller
I0613 01:30:40.073090  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pvc-protection-controller: (1.643776ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.093839  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.271087ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.094116  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I0613 01:30:40.112836  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pv-protection-controller: (1.275168ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.130953  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.130991  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.131200  109680 wrap.go:47] GET /healthz: (1.349527ms) 500
goroutine 27189 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010f8620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010f8620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00456d980, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004772230, 0xc00d8efcc0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004772230, 0xc01015be00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004772230, 0xc01015be00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004772230, 0xc01015be00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004772230, 0xc01015be00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004772230, 0xc01015be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004772230, 0xc01015bd00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004772230, 0xc01015bd00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0104b87e0, 0xc00c8aea40, 0x7423da0, 0xc004772230, 0xc01015bd00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:40.133454  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.809077ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.133647  109680 storage_rbac.go:195] created clusterrole.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0613 01:30:40.147510  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.147558  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.147694  109680 wrap.go:47] GET /healthz: (1.547891ms) 500
goroutine 27268 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0011359d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0011359d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0045d7760, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0071dbcd0, 0xc0107c8280, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0071dbcd0, 0xc010689500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0071dbcd0, 0xc010689500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc01059b8c0, 0xc00c8aea40, 0x7423da0, 0xc0071dbcd0, 0xc010689500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.152829  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/cluster-admin: (1.274505ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.174584  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.93432ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.175395  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/cluster-admin
I0613 01:30:40.192987  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:discovery: (1.393104ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.215241  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.187445ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.216660  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:discovery
I0613 01:30:40.231296  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.231327  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.231496  109680 wrap.go:47] GET /healthz: (1.591696ms) 500
goroutine 27282 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010d20e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010d20e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00468ca60, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000aa4ba0, 0xc00dec0640, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1a00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000aa4ba0, 0xc00f6b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00f991140, 0xc00c8aea40, 0x7423da0, 0xc000aa4ba0, 0xc00f6b1a00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:40.233216  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:basic-user: (1.363344ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.247288  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.247319  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.247625  109680 wrap.go:47] GET /healthz: (1.477914ms) 500
goroutine 27284 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010d21c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010d21c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00468cdc0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000aa4bd8, 0xc003bac3c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000aa4bd8, 0xc01095c100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000aa4bd8, 0xc01095c100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00f9914a0, 0xc00c8aea40, 0x7423da0, 0xc000aa4bd8, 0xc01095c100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.253666  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.220347ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.254046  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:basic-user
I0613 01:30:40.272928  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:public-info-viewer: (1.359659ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.295535  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.628732ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.295751  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:public-info-viewer
I0613 01:30:40.313236  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:node-proxier: (1.651135ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.331414  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.331531  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.331756  109680 wrap.go:47] GET /healthz: (1.88874ms) 500
goroutine 27259 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00110b9d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00110b9d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0046e5640, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2abe0, 0xc003cf9400, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2abe0, 0xc0109d6400)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2abe0, 0xc0109d6400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0105e7920, 0xc00c8aea40, 0x7423da0, 0xc006b2abe0, 0xc0109d6400)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:40.334118  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.38924ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.334334  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:node-proxier
I0613 01:30:40.347419  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.347451  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.347609  109680 wrap.go:47] GET /healthz: (1.500294ms) 500
goroutine 27240 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc001129b90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc001129b90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00458c5e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000ee7590, 0xc003df4500, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000ee7590, 0xc01025be00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000ee7590, 0xc01025bd00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000ee7590, 0xc01025bd00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc01024bce0, 0xc00c8aea40, 0x7423da0, 0xc000ee7590, 0xc01025bd00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.352925  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-controller-manager: (1.462103ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.373696  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.13767ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.374107  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-controller-manager
I0613 01:30:40.392958  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-dns: (1.425061ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.413649  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.076913ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.413930  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-dns
I0613 01:30:40.430886  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.430917  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.431068  109680 wrap.go:47] GET /healthz: (1.331921ms) 500
goroutine 27298 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010c2000, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010c2000, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004748a00, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007260448, 0xc0107c8a00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007260448, 0xc010ab2d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007260448, 0xc010ab2c00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007260448, 0xc010ab2c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010447f80, 0xc00c8aea40, 0x7423da0, 0xc007260448, 0xc010ab2c00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.432459  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-scheduler: (1.052349ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.447679  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.447814  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.448062  109680 wrap.go:47] GET /healthz: (1.861897ms) 500
goroutine 27314 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0009f60e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0009f60e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00472d200, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2ad08, 0xc0107c8f00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2ad08, 0xc0109d7700)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2ad08, 0xc0109d7700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010b943c0, 0xc00c8aea40, 0x7423da0, 0xc006b2ad08, 0xc0109d7700)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.453978  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.540578ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.454206  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-scheduler
I0613 01:30:40.473376  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:volume-scheduler: (1.821366ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.494191  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.629375ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.494419  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:volume-scheduler
I0613 01:30:40.513055  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:node: (1.526522ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.531277  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.531309  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.531460  109680 wrap.go:47] GET /healthz: (1.68665ms) 500
goroutine 27330 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010cabd0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010cabd0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004796ce0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000ee7818, 0xc0107c9400, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000ee7818, 0xc010a77b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000ee7818, 0xc010a77a00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000ee7818, 0xc010a77a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010a83440, 0xc00c8aea40, 0x7423da0, 0xc000ee7818, 0xc010a77a00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.533396  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.962719ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.533676  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:node
I0613 01:30:40.547758  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.547792  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.547969  109680 wrap.go:47] GET /healthz: (1.772078ms) 500
goroutine 27335 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010cae70, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010cae70, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004797e40, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000ee78d0, 0xc003cf9b80, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000ee78d0, 0xc010c42600)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000ee78d0, 0xc010c42600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010a83a40, 0xc00c8aea40, 0x7423da0, 0xc000ee78d0, 0xc010c42600)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.552544  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:attachdetach-controller: (1.209319ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.573689  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.122291ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.573994  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0613 01:30:40.597068  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:clusterrole-aggregation-controller: (1.334443ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.614054  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.469801ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.614485  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0613 01:30:40.652177  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.652219  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.652370  109680 wrap.go:47] GET /healthz: (2.371266ms) 500
goroutine 27316 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0009f67e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0009f67e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0047765e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2ae08, 0xc0091e1a40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2ae08, 0xc010e74000)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2ae08, 0xc010e74000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010b94780, 0xc00c8aea40, 0x7423da0, 0xc006b2ae08, 0xc010e74000)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.652477  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.652501  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.652622  109680 wrap.go:47] GET /healthz: (1.941543ms) 500
goroutine 27339 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010cb1f0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010cb1f0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0047c2c20, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000ee79d8, 0xc011014000, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000ee79d8, 0xc010c42a00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000ee79d8, 0xc010c42a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010a83e60, 0xc00c8aea40, 0x7423da0, 0xc000ee79d8, 0xc010c42a00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56548]
I0613 01:30:40.652897  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:cronjob-controller: (2.832607ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:40.655226  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.856814ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56548]
I0613 01:30:40.655491  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0613 01:30:40.672956  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:daemon-set-controller: (1.405455ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.694040  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.51494ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.694297  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0613 01:30:40.713023  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:deployment-controller: (1.457846ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.730927  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.730961  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.731134  109680 wrap.go:47] GET /healthz: (1.316349ms) 500
goroutine 27308 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010c2e00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010c2e00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0047dfb40, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007260a48, 0xc003bacb40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007260a48, 0xc010d1d600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007260a48, 0xc010d1d500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007260a48, 0xc010d1d500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010b49560, 0xc00c8aea40, 0x7423da0, 0xc007260a48, 0xc010d1d500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.733785  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.277984ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.734010  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:deployment-controller
I0613 01:30:40.747685  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.747717  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.747907  109680 wrap.go:47] GET /healthz: (1.501217ms) 500
goroutine 27213 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc001119b90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc001119b90, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00480c520, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e0e68, 0xc0107c9a40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e0e68, 0xc011218700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e0e68, 0xc011218600)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e0e68, 0xc011218600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0101e1e00, 0xc00c8aea40, 0x7423da0, 0xc0028e0e68, 0xc011218600)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.752638  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:disruption-controller: (1.223707ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.773943  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.418281ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.774184  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:disruption-controller
I0613 01:30:40.792748  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:endpoint-controller: (1.265357ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.813829  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.279727ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.814081  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0613 01:30:40.830940  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.830970  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.831130  109680 wrap.go:47] GET /healthz: (1.341184ms) 500
goroutine 27380 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010c3f10, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010c3f10, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004850c60, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007260de0, 0xc0113ea000, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007260de0, 0xc0113c8800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007260de0, 0xc0113c8700)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007260de0, 0xc0113c8700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0113d2540, 0xc00c8aea40, 0x7423da0, 0xc007260de0, 0xc0113c8700)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.832885  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:expand-controller: (1.133106ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.847241  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.847480  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.847722  109680 wrap.go:47] GET /healthz: (1.567819ms) 500
goroutine 27350 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0008c47e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0008c47e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0047e3c80, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773208, 0xc003df5680, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773208, 0xc010da3800)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773208, 0xc010da3800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773208, 0xc010da3800)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773208, 0xc010da3800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773208, 0xc010da3800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773208, 0xc010da3700)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773208, 0xc010da3700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010d736e0, 0xc00c8aea40, 0x7423da0, 0xc004773208, 0xc010da3700)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.853581  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.131474ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.856552  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:expand-controller
I0613 01:30:40.872890  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:generic-garbage-collector: (1.300869ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.893945  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.322538ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.894217  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0613 01:30:40.913042  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:horizontal-pod-autoscaler: (1.454099ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.930981  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.931017  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.931162  109680 wrap.go:47] GET /healthz: (1.292845ms) 500
goroutine 27352 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0008c48c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0008c48c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0048789a0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773328, 0xc010d02780, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773328, 0xc011568400)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773328, 0xc011568400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773328, 0xc011568400)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773328, 0xc011568400)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773328, 0xc011568400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773328, 0xc011568300)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773328, 0xc011568300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010d73bc0, 0xc00c8aea40, 0x7423da0, 0xc004773328, 0xc011568300)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:40.933317  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.883114ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.933534  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0613 01:30:40.948003  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:40.948034  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:40.948334  109680 wrap.go:47] GET /healthz: (2.123114ms) 500
goroutine 27357 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0008c5030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0008c5030, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0048ee7a0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773470, 0xc010d02c80, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773470, 0xc011569100)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773470, 0xc011569100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773470, 0xc011569100)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773470, 0xc011569100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773470, 0xc011569100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773470, 0xc011569000)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773470, 0xc011569000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0115ea1e0, 0xc00c8aea40, 0x7423da0, 0xc004773470, 0xc011569000)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.952659  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:job-controller: (1.220725ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.974128  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.50054ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:40.974348  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:job-controller
I0613 01:30:40.993180  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:namespace-controller: (1.672264ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.016349  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.701306ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.016710  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:namespace-controller
I0613 01:30:41.032796  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.032823  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.033003  109680 wrap.go:47] GET /healthz: (1.460197ms) 500
goroutine 27412 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0008864d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0008864d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049127a0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e1058, 0xc010d03400, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e1058, 0xc001256200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e1058, 0xc001256200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e1058, 0xc001256200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e1058, 0xc001256200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e1058, 0xc001256200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e1058, 0xc001256100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e1058, 0xc001256100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0112b5020, 0xc00c8aea40, 0x7423da0, 0xc0028e1058, 0xc001256100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.033356  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:node-controller: (1.400724ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.047427  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.047493  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.047734  109680 wrap.go:47] GET /healthz: (1.425602ms) 500
goroutine 27387 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000368d20, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000368d20, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004910d80, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007260fd8, 0xc007b91b80, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007260fd8, 0xc011b1ca00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007260fd8, 0xc011b1c900)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007260fd8, 0xc011b1c900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0113d3b60, 0xc00c8aea40, 0x7423da0, 0xc007260fd8, 0xc011b1c900)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.053354  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.915691ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.053717  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:node-controller
I0613 01:30:41.072821  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:persistent-volume-binder: (1.284876ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.093820  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.26091ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.094082  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0613 01:30:41.113094  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pod-garbage-collector: (1.455414ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.131191  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.131225  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.131421  109680 wrap.go:47] GET /healthz: (1.615781ms) 500
goroutine 27427 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000862540, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000862540, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00497b4c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773690, 0xc0113ea780, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773690, 0xc0012d4c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773690, 0xc0012d4b00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773690, 0xc0012d4b00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0115eb7a0, 0xc00c8aea40, 0x7423da0, 0xc004773690, 0xc0012d4b00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:41.133488  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.013391ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.133741  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0613 01:30:41.147060  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.147090  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.147289  109680 wrap.go:47] GET /healthz: (1.135918ms) 500
goroutine 27404 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0008668c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0008668c0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004980180, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2b328, 0xc010d03a40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1a00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2b328, 0xc0114b1900)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2b328, 0xc0114b1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc011401380, 0xc00c8aea40, 0x7423da0, 0xc006b2b328, 0xc0114b1900)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.152427  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replicaset-controller: (961.885µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.173146  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.600573ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.173429  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0613 01:30:41.192671  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replication-controller: (1.105742ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.213601  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.091212ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.213881  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replication-controller
I0613 01:30:41.238765  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.238806  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.238972  109680 wrap.go:47] GET /healthz: (9.160574ms) 500
goroutine 27435 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000862f50, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000862f50, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049bc1c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773898, 0xc0057c4140, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773898, 0xc0056e4300)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773898, 0xc0056e4300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773898, 0xc0056e4300)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773898, 0xc0056e4300)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773898, 0xc0056e4300)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773898, 0xc0056e4200)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773898, 0xc0056e4200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004352480, 0xc00c8aea40, 0x7423da0, 0xc004773898, 0xc0056e4200)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.239439  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:resourcequota-controller: (1.220838ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.247174  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.247208  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.247402  109680 wrap.go:47] GET /healthz: (1.267374ms) 500
goroutine 27443 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000867340, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000867340, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049c6b20, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2b5e0, 0xc0113eaf00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2b5e0, 0xc004350c00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2b5e0, 0xc004350c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc011401da0, 0xc00c8aea40, 0x7423da0, 0xc006b2b5e0, 0xc004350c00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.253537  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.093617ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.253740  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0613 01:30:41.273391  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:route-controller: (1.669394ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.294602  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.992881ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.294872  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:route-controller
I0613 01:30:41.313314  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-account-controller: (1.766105ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.331131  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.331163  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.331345  109680 wrap.go:47] GET /healthz: (1.568615ms) 500
goroutine 27461 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00085c380, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00085c380, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049e3d20, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007261340, 0xc005c38000, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007261340, 0xc0058cb000)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007261340, 0xc0058cb000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007261340, 0xc0058cb000)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007261340, 0xc0058cb000)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007261340, 0xc0058cb000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007261340, 0xc0058caf00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007261340, 0xc0058caf00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004a5af00, 0xc00c8aea40, 0x7423da0, 0xc007261340, 0xc0058caf00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:41.333604  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (1.616969ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.333808  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-account-controller
I0613 01:30:41.347155  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.347344  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.347589  109680 wrap.go:47] GET /healthz: (1.420562ms) 500
goroutine 27463 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00085c4d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00085c4d0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049f0240, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007261380, 0xc005c38640, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007261380, 0xc0058cb900)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007261380, 0xc0058cb900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007261380, 0xc0058cb900)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007261380, 0xc0058cb900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007261380, 0xc0058cb900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007261380, 0xc0058cb800)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007261380, 0xc0058cb800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004a5b2c0, 0xc00c8aea40, 0x7423da0, 0xc007261380, 0xc0058cb800)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.352704  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-controller: (1.278075ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.373726  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.243225ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.374127  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-controller
I0613 01:30:41.392794  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:statefulset-controller: (1.298322ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.414544  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.913752ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.414897  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0613 01:30:41.447586  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.447620  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.447781  109680 wrap.go:47] GET /healthz: (17.955111ms) 500
goroutine 27467 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00085ca80, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00085ca80, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0049f10c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007261478, 0xc00dec0f00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007261478, 0xc005ff0e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007261478, 0xc005ff0d00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007261478, 0xc005ff0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004a5be00, 0xc00c8aea40, 0x7423da0, 0xc007261478, 0xc005ff0d00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:41.449507  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:ttl-controller: (2.298995ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.474555  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.157484ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.474817  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:ttl-controller
I0613 01:30:41.476299  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:certificate-controller: (1.100769ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.485535  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.485562  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.485700  109680 wrap.go:47] GET /healthz: (38.506195ms) 500
goroutine 27474 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc0010eaee0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc0010eaee0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00471e7e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc00ade6228, 0xc011014640, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc00ade6228, 0xc01087f900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc00ade6228, 0xc01087f800)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc00ade6228, 0xc01087f800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc010882c60, 0xc00c8aea40, 0x7423da0, 0xc00ade6228, 0xc01087f800)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56714]
I0613 01:30:41.493996  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.381647ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.494232  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:certificate-controller
I0613 01:30:41.512801  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pvc-protection-controller: (1.249322ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
E0613 01:30:41.523554  109680 event.go:249] Unable to write event: 'Patch http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/events/test-pod.15a79dea7c9775c3: dial tcp 127.0.0.1:41869: connect: connection refused' (may retry after sleeping)
I0613 01:30:41.533919  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.355945ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.534166  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I0613 01:30:41.535900  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.535924  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.536076  109680 wrap.go:47] GET /healthz: (1.009808ms) 500
goroutine 27490 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000854770, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000854770, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004ab14e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e1918, 0xc0113eb680, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e1918, 0xc0064c0800)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e1918, 0xc0064c0800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc005b3d020, 0xc00c8aea40, 0x7423da0, 0xc0028e1918, 0xc0064c0800)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.547257  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.547288  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.547493  109680 wrap.go:47] GET /healthz: (1.350625ms) 500
goroutine 27492 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000854850, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000854850, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004ab19c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e1928, 0xc005c38b40, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0d00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e1928, 0xc0064c0c00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e1928, 0xc0064c0c00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc005b3d320, 0xc00c8aea40, 0x7423da0, 0xc0028e1928, 0xc0064c0c00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.552626  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pv-protection-controller: (1.190936ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.573919  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.394502ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.574183  109680 storage_rbac.go:223] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0613 01:30:41.592742  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/extension-apiserver-authentication-reader: (1.211485ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.594492  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.29912ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.614207  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (2.670945ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.614515  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/extension-apiserver-authentication-reader in kube-system
I0613 01:30:41.630827  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.630875  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.631047  109680 wrap.go:47] GET /healthz: (1.278969ms) 500
goroutine 27497 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000854f50, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000854f50, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0048690c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0028e1a78, 0xc0057c48c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1900)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0028e1a78, 0xc0064c1800)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0028e1a78, 0xc0064c1800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc005b3d980, 0xc00c8aea40, 0x7423da0, 0xc0028e1a78, 0xc0064c1800)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.632447  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:bootstrap-signer: (1.100206ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.634078  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.235212ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.647030  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.647062  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.647264  109680 wrap.go:47] GET /healthz: (1.157812ms) 500
goroutine 27451 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000852b60, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000852b60, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc004a55e80, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2b9c8, 0xc005c39180, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2b9c8, 0xc006c60400)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2b9c8, 0xc006c60400)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0058188a0, 0xc00c8aea40, 0x7423da0, 0xc006b2b9c8, 0xc006c60400)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.653790  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (2.123977ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.654046  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0613 01:30:41.672898  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:cloud-provider: (1.345232ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.674719  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.445979ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.693763  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (2.21772ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.694000  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0613 01:30:41.713135  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:token-cleaner: (1.47512ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.714924  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.2084ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.730874  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.730906  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.731070  109680 wrap.go:47] GET /healthz: (1.304913ms) 500
goroutine 27516 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00082caf0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00082caf0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0072c2320, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0072617f0, 0xc005c39680, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0072617f0, 0xc007c60700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0072617f0, 0xc007c60600)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0072617f0, 0xc007c60600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00620ff80, 0xc00c8aea40, 0x7423da0, 0xc0072617f0, 0xc007c60600)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.733507  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (2.025262ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.733703  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I0613 01:30:41.747237  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.747293  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.747446  109680 wrap.go:47] GET /healthz: (1.328215ms) 500
goroutine 27375 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000858620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000858620, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0048e2180, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000aa5538, 0xc003bad2c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000aa5538, 0xc006497100)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000aa5538, 0xc006497100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000aa5538, 0xc006497100)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000aa5538, 0xc006497100)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000aa5538, 0xc006497100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000aa5538, 0xc006497000)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000aa5538, 0xc006497000)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00646e900, 0xc00c8aea40, 0x7423da0, 0xc000aa5538, 0xc006497000)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.752908  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system::leader-locking-kube-controller-manager: (1.426978ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.754498  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.160572ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.773314  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (1.795527ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.773828  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0613 01:30:41.792707  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system::leader-locking-kube-scheduler: (1.189105ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.794337  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.169854ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.813802  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (2.108315ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.814106  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0613 01:30:41.831008  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.831038  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.831195  109680 wrap.go:47] GET /healthz: (1.413558ms) 500
goroutine 27441 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00081e460, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00081e460, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00779a7c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773b00, 0xc008456280, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773b00, 0xc008446200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773b00, 0xc008446200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773b00, 0xc008446200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773b00, 0xc008446200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773b00, 0xc008446200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773b00, 0xc008446100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773b00, 0xc008446100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004353380, 0xc00c8aea40, 0x7423da0, 0xc004773b00, 0xc008446100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.832621  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/roles/system:controller:bootstrap-signer: (973.251µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.834073  109680 wrap.go:47] GET /api/v1/namespaces/kube-public: (1.106993ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.847317  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.847386  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.847827  109680 wrap.go:47] GET /healthz: (1.641892ms) 500
goroutine 27530 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000822e00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000822e00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc007510c80, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc006b2bef8, 0xc0113ebe00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944f00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc006b2bef8, 0xc00e944e00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc006b2bef8, 0xc00e944e00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc007de2660, 0xc00c8aea40, 0x7423da0, 0xc006b2bef8, 0xc00e944e00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.853535  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/roles: (2.053078ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.853894  109680 storage_rbac.go:254] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I0613 01:30:41.873009  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::extension-apiserver-authentication-reader: (1.414856ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.874850  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.359122ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.894104  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.560239ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.894340  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system::extension-apiserver-authentication-reader in kube-system
I0613 01:30:41.913123  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::leader-locking-kube-controller-manager: (1.549873ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.914956  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.357413ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:41.931143  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.931227  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.931482  109680 wrap.go:47] GET /healthz: (1.430035ms) 500
goroutine 27546 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00065ab60, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00065ab60, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc0078f78c0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007261a90, 0xc005c39cc0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007261a90, 0xc0092cad00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007261a90, 0xc0092cac00)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007261a90, 0xc0092cac00)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc007cb39e0, 0xc00c8aea40, 0x7423da0, 0xc007261a90, 0xc0092cac00)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56504]
I0613 01:30:41.933949  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.131054ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.934185  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0613 01:30:41.947515  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:41.947660  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:41.947880  109680 wrap.go:47] GET /healthz: (1.642307ms) 500
goroutine 27548 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00065ae00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00065ae00, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc009ae2180, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc007261ad8, 0xc009a6a3c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb600)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc007261ad8, 0xc0092cb500)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc007261ad8, 0xc0092cb500)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc007cb3da0, 0xc00c8aea40, 0x7423da0, 0xc007261ad8, 0xc0092cb500)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.957559  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::leader-locking-kube-scheduler: (1.324155ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
E0613 01:30:41.957883  109680 factory.go:711] Error getting pod permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/test-pod for retry: Get http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/pods/test-pod: dial tcp 127.0.0.1:41869: connect: connection refused; retrying...
I0613 01:30:41.959934  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.354777ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.974014  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.520273ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:41.974379  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0613 01:30:42.000605  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:bootstrap-signer: (1.310218ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.002476  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.227365ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.013595  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.140678ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.014113  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0613 01:30:42.031326  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:42.031397  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:42.031593  109680 wrap.go:47] GET /healthz: (1.667532ms) 500
goroutine 27563 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc000859f80, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc000859f80, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc009b255e0, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc000aa5fd8, 0xc009a6a8c0, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc000aa5fd8, 0xc00bbee100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc009bb4480, 0xc00c8aea40, 0x7423da0, 0xc000aa5fd8, 0xc00bbee100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:42.039196  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:cloud-provider: (6.934606ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.041200  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.531971ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.047083  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:42.047186  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:42.047379  109680 wrap.go:47] GET /healthz: (1.277989ms) 500
goroutine 27619 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00081ea10, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00081ea10, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc00779b320, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc004773b40, 0xc009a6af00, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc004773b40, 0xc008446800)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc004773b40, 0xc008446800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc004773b40, 0xc008446800)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc004773b40, 0xc008446800)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc004773b40, 0xc008446800)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc004773b40, 0xc008446700)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc004773b40, 0xc008446700)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc004353680, 0xc00c8aea40, 0x7423da0, 0xc004773b40, 0xc008446700)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.053192  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (1.779716ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.053424  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0613 01:30:42.072850  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:token-cleaner: (1.289425ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.074531  109680 wrap.go:47] GET /api/v1/namespaces/kube-system: (1.304879ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.093705  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.193645ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.093944  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I0613 01:30:42.112897  109680 wrap.go:47] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/rolebindings/system:controller:bootstrap-signer: (1.402626ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.114876  109680 wrap.go:47] GET /api/v1/namespaces/kube-public: (1.550194ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.131071  109680 healthz.go:161] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I0613 01:30:42.131106  109680 healthz.go:175] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I0613 01:30:42.131263  109680 wrap.go:47] GET /healthz: (1.370239ms) 500
goroutine 27580 [running]:
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).recordStatus(0xc00043a7e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/httplog.(*respLogger).WriteHeader(0xc00043a7e0, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*baseTimeoutWriter).WriteHeader(0xc009ff4920, 0x1f4)
net/http.Error(0x7f10b2015268, 0xc0026c5df0, 0xc0091a6500, 0x136, 0x1f4)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/healthz.handleRootHealthz.func1(0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
net/http.HandlerFunc.ServeHTTP(0xc00372c240, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*pathHandler).ServeHTTP(0xc003aa75c0, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/mux.(*PathRecorderMux).ServeHTTP(0xc00c9e2620, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x43bee9b, 0xe, 0xc0083d5a70, 0xc00c9e2620, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a5c0, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func1(0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
net/http.HandlerFunc.ServeHTTP(0xc0030b6de0, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
net/http.HandlerFunc.ServeHTTP(0xc00c61a600, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c200)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f10b2015268, 0xc0026c5df0, 0xc00c30c100)
net/http.HandlerFunc.ServeHTTP(0xc00bd91310, 0x7f10b2015268, 0xc0026c5df0, 0xc00c30c100)
k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0062bda40, 0xc00c8aea40, 0x7423da0, 0xc0026c5df0, 0xc00c30c100)
created by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP

logging error output: "[+]ping ok\n[+]log ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/bootstrap-controller ok\n[-]poststarthook/rbac/bootstrap-roles failed: reason withheld\n[+]poststarthook/scheduling/bootstrap-system-priority-classes ok\n[+]poststarthook/ca-registration ok\nhealthz check failed\n"
 [Go-http-client/1.1 127.0.0.1:56542]
I0613 01:30:42.133261  109680 wrap.go:47] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/rolebindings: (1.862367ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.135038  109680 storage_rbac.go:284] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I0613 01:30:42.147155  109680 wrap.go:47] GET /healthz: (1.040399ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.148609  109680 wrap.go:47] GET /api/v1/namespaces/default: (1.07244ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.150614  109680 wrap.go:47] POST /api/v1/namespaces: (1.675604ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.151989  109680 wrap.go:47] GET /api/v1/namespaces/default/services/kubernetes: (1.010297ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.155996  109680 wrap.go:47] POST /api/v1/namespaces/default/services: (3.589739ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.157611  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (1.290821ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.159698  109680 wrap.go:47] POST /api/v1/namespaces/default/endpoints: (1.716958ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.231338  109680 wrap.go:47] GET /healthz: (1.417145ms) 200 [Go-http-client/1.1 127.0.0.1:56504]
W0613 01:30:42.232570  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232685  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232762  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232804  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232834  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232871  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232900  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232930  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232953  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0613 01:30:42.232976  109680 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
I0613 01:30:42.233057  109680 factory.go:345] Creating scheduler from algorithm provider 'DefaultProvider'
I0613 01:30:42.233085  109680 factory.go:433] Creating scheduler with fit predicates 'map[CheckNodeCondition:{} CheckNodeDiskPressure:{} CheckNodeMemoryPressure:{} CheckNodePIDPressure:{} CheckVolumeBinding:{} GeneralPredicates:{} MatchInterPodAffinity:{} MaxAzureDiskVolumeCount:{} MaxCSIVolumeCountPred:{} MaxEBSVolumeCount:{} MaxGCEPDVolumeCount:{} NoDiskConflict:{} NoVolumeZoneConflict:{} PodToleratesNodeTaints:{}]' and priority functions 'map[BalancedResourceAllocation:{} ImageLocalityPriority:{} InterPodAffinityPriority:{} LeastRequestedPriority:{} NodeAffinityPriority:{} NodePreferAvoidPodsPriority:{} SelectorSpreadPriority:{} TaintTolerationPriority:{}]'
I0613 01:30:42.233338  109680 controller_utils.go:1029] Waiting for caches to sync for scheduler controller
I0613 01:30:42.233816  109680 reflector.go:122] Starting reflector *v1.Pod (12h0m0s) from k8s.io/kubernetes/test/integration/scheduler/util.go:223
I0613 01:30:42.233884  109680 reflector.go:160] Listing and watching *v1.Pod from k8s.io/kubernetes/test/integration/scheduler/util.go:223
I0613 01:30:42.235130  109680 wrap.go:47] GET /api/v1/pods?fieldSelector=status.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&limit=500&resourceVersion=0: (946.336µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:30:42.235971  109680 get.go:250] Starting watch for /api/v1/pods, rv=24441 labels= fields=status.phase!=Failed,status.phase!=Succeeded timeout=8m44s
I0613 01:30:42.336002  109680 shared_informer.go:176] caches populated
I0613 01:30:42.336039  109680 controller_utils.go:1036] Caches are synced for scheduler controller
I0613 01:30:42.336375  109680 reflector.go:122] Starting reflector *v1.StorageClass (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.336395  109680 reflector.go:160] Listing and watching *v1.StorageClass from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.336552  109680 reflector.go:122] Starting reflector *v1.PersistentVolumeClaim (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.336575  109680 reflector.go:160] Listing and watching *v1.PersistentVolumeClaim from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.336824  109680 reflector.go:122] Starting reflector *v1.Service (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.336838  109680 reflector.go:160] Listing and watching *v1.Service from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337121  109680 reflector.go:122] Starting reflector *v1.PersistentVolume (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337135  109680 reflector.go:160] Listing and watching *v1.PersistentVolume from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337205  109680 reflector.go:122] Starting reflector *v1.ReplicationController (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337219  109680 reflector.go:160] Listing and watching *v1.ReplicationController from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337468  109680 reflector.go:122] Starting reflector *v1.StatefulSet (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337480  109680 reflector.go:160] Listing and watching *v1.StatefulSet from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337579  109680 reflector.go:122] Starting reflector *v1beta1.PodDisruptionBudget (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337593  109680 reflector.go:160] Listing and watching *v1beta1.PodDisruptionBudget from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337942  109680 reflector.go:122] Starting reflector *v1.ReplicaSet (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.337956  109680 reflector.go:160] Listing and watching *v1.ReplicaSet from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.338015  109680 reflector.go:122] Starting reflector *v1.Node (1s) from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.338029  109680 reflector.go:160] Listing and watching *v1.Node from k8s.io/client-go/informers/factory.go:133
I0613 01:30:42.356242  109680 wrap.go:47] GET /apis/storage.k8s.io/v1/storageclasses?limit=500&resourceVersion=0: (570.435µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.356373  109680 wrap.go:47] GET /api/v1/persistentvolumeclaims?limit=500&resourceVersion=0: (564.547µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56722]
I0613 01:30:42.356954  109680 wrap.go:47] GET /api/v1/services?limit=500&resourceVersion=0: (480.002µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56724]
I0613 01:30:42.357248  109680 wrap.go:47] GET /api/v1/nodes?limit=500&resourceVersion=0: (399.06µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:30:42.357342  109680 get.go:250] Starting watch for /apis/storage.k8s.io/v1/storageclasses, rv=24441 labels= fields= timeout=9m51s
I0613 01:30:42.357687  109680 wrap.go:47] GET /api/v1/persistentvolumes?limit=500&resourceVersion=0: (336.503µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56726]
I0613 01:30:42.358116  109680 wrap.go:47] GET /api/v1/replicationcontrollers?limit=500&resourceVersion=0: (343.213µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56728]
I0613 01:30:42.358528  109680 wrap.go:47] GET /apis/apps/v1/statefulsets?limit=500&resourceVersion=0: (330.451µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56730]
I0613 01:30:42.358925  109680 get.go:250] Starting watch for /api/v1/nodes, rv=24441 labels= fields= timeout=8m28s
I0613 01:30:42.358986  109680 wrap.go:47] GET /apis/policy/v1beta1/poddisruptionbudgets?limit=500&resourceVersion=0: (376.457µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56732]
I0613 01:30:42.359421  109680 wrap.go:47] GET /apis/apps/v1/replicasets?limit=500&resourceVersion=0: (344.554µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56734]
I0613 01:30:42.359645  109680 get.go:250] Starting watch for /api/v1/persistentvolumeclaims, rv=24441 labels= fields= timeout=6m34s
I0613 01:30:42.359751  109680 get.go:250] Starting watch for /api/v1/replicationcontrollers, rv=24441 labels= fields= timeout=6m48s
I0613 01:30:42.359950  109680 get.go:250] Starting watch for /api/v1/services, rv=24564 labels= fields= timeout=8m2s
I0613 01:30:42.359996  109680 get.go:250] Starting watch for /apis/policy/v1beta1/poddisruptionbudgets, rv=24441 labels= fields= timeout=6m30s
I0613 01:30:42.360216  109680 get.go:250] Starting watch for /apis/apps/v1/replicasets, rv=24441 labels= fields= timeout=5m28s
I0613 01:30:42.360235  109680 get.go:250] Starting watch for /apis/apps/v1/statefulsets, rv=24441 labels= fields= timeout=9m33s
I0613 01:30:42.360354  109680 get.go:250] Starting watch for /api/v1/persistentvolumes, rv=24441 labels= fields= timeout=6m52s
I0613 01:30:42.436359  109680 shared_informer.go:176] caches populated
I0613 01:30:42.536515  109680 shared_informer.go:176] caches populated
I0613 01:30:42.636753  109680 shared_informer.go:176] caches populated
I0613 01:30:42.737015  109680 shared_informer.go:176] caches populated
I0613 01:30:42.837244  109680 shared_informer.go:176] caches populated
I0613 01:30:42.939957  109680 shared_informer.go:176] caches populated
I0613 01:30:43.040168  109680 shared_informer.go:176] caches populated
I0613 01:30:43.140401  109680 shared_informer.go:176] caches populated
I0613 01:30:43.240607  109680 shared_informer.go:176] caches populated
I0613 01:30:43.341395  109680 shared_informer.go:176] caches populated
I0613 01:30:43.343952  109680 wrap.go:47] POST /api/v1/nodes: (2.051362ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.355623  109680 wrap.go:47] PUT /api/v1/nodes/testnode/status: (7.287473ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.356929  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:43.358047  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:43.358226  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:43.358574  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:43.359467  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:43.363814  109680 wrap.go:47] POST /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods: (7.828462ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.364417  109680 scheduling_queue.go:815] About to try and schedule pod node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pidpressure-fake-name
I0613 01:30:43.364432  109680 scheduler.go:456] Attempting to schedule pod: node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pidpressure-fake-name
I0613 01:30:43.364558  109680 scheduler_binder.go:256] AssumePodVolumes for pod "node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pidpressure-fake-name", node "testnode"
I0613 01:30:43.364573  109680 scheduler_binder.go:266] AssumePodVolumes for pod "node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pidpressure-fake-name", node "testnode": all PVCs bound and nothing to do
I0613 01:30:43.364617  109680 factory.go:727] Attempting to bind pidpressure-fake-name to testnode
I0613 01:30:43.368844  109680 wrap.go:47] POST /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name/binding: (2.27674ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.369152  109680 scheduler.go:593] pod node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pidpressure-fake-name is bound successfully on node testnode, 1 nodes evaluated, 1 nodes were found feasible
I0613 01:30:43.371118  109680 wrap.go:47] POST /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/events: (1.644832ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.465851  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.288991ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.566204  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.624552ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.666385  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.823919ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.766418  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.813147ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.866104  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.50478ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:43.965895  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.242887ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.067038  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.365958ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.165979  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.311036ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.266242  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.614632ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.357099  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:44.358199  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:44.358365  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:44.358968  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:44.359575  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:44.365987  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.403271ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.469472  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.696956ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.571314  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.068078ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.665959  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.327606ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.766341  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.672619ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.867155  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.450161ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:44.966212  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.563581ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.066209  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.498539ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.166002  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.412923ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.265829  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.257007ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.357284  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:45.358333  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:45.358519  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:45.359110  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:45.359676  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:45.366370  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.774217ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.466061  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.475128ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.565747  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.209819ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.666013  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.460196ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.765985  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.410664ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.866095  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.471015ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:45.965974  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.37014ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.066153  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.524879ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.165876  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.228862ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.265933  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.320994ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.357476  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:46.358536  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:46.358899  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:46.359282  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:46.359814  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:46.366244  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.63768ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.466800  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.201469ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.567815  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.244866ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.665966  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.353994ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.767647  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.083069ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.865987  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.386389ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:46.966203  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.557435ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.066272  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.629004ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.166670  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.633797ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.266107  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.472332ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.357671  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:47.358751  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:47.359362  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:47.359503  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:47.359979  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:47.367457  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.863867ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.466244  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.611068ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.566188  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.569615ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.666219  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.63449ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.766567  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.976954ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.866084  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.42977ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:47.966433  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.779758ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.066095  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.49944ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.166642  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.989084ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.266644  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.995581ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.357918  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:48.358958  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:48.359483  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:48.359627  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:48.360089  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:48.366152  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.543594ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.466919  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.784288ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.566578  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.41096ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.668163  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.681009ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.766559  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.889836ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.866351  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.676986ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:48.966407  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.636636ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.066282  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.550954ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.166568  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.907567ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.266730  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.780391ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.358309  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:49.359185  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:49.360240  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:49.360356  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:49.360378  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:49.368066  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.546164ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.467197  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.617538ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.567284  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.653172ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.666361  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.770909ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.766519  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.945747ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.866509  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.819392ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:49.966555  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.879729ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.067072  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.437499ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.166902  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.170441ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.266956  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.648935ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.358874  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:50.359379  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:50.360371  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:50.360483  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:50.360498  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:50.366047  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.459773ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.467325  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.900753ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.566353  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.769195ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.667957  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.709559ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.766222  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.608851ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.870261  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (5.380816ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:50.970252  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (5.152901ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.068308  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.660584ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.171121  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.847284ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.269057  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (4.417472ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.359020  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:51.359530  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:51.360578  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:51.360713  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:51.360805  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:51.366524  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.929657ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.466798  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.18903ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
E0613 01:30:51.524108  109680 event.go:249] Unable to write event: 'Patch http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/events/test-pod.15a79dea7c9775c3: dial tcp 127.0.0.1:41869: connect: connection refused' (may retry after sleeping)
I0613 01:30:51.566119  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.528611ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.667775  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.751009ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.766715  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.941465ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.866481  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.889198ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:51.966499  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.843071ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.066426  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.75017ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.149300  109680 wrap.go:47] GET /api/v1/namespaces/default: (1.503469ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.150770  109680 wrap.go:47] GET /api/v1/namespaces/default/services/kubernetes: (1.115734ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.152046  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (950.402µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.170955  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.564299ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.274396  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (9.788631ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.359237  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:52.359676  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:52.360701  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:52.360830  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:52.360874  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:52.367302  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.732364ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.466006  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.452765ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.566314  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.730223ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.666151  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.510108ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.766549  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.95235ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.866584  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.975106ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:52.966447  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.801424ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.066073  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.434291ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.166397  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.723561ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.267476  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.315549ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.359641  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:53.359844  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:53.360840  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:53.360953  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:53.360981  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:53.366077  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.52466ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.466381  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.70641ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.566559  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.947883ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.667963  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.421357ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.766361  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.77391ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.866005  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.346951ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:53.966391  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.698464ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.066335  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.685118ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.166372  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.690136ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.266229  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.545668ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.359851  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:54.360380  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:54.361791  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:54.361898  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:54.361950  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:54.366436  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.818473ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.466459  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.832741ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.566359  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.738937ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.666379  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.720157ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
E0613 01:30:54.758478  109680 factory.go:711] Error getting pod permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/test-pod for retry: Get http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/pods/test-pod: dial tcp 127.0.0.1:41869: connect: connection refused; retrying...
I0613 01:30:54.766365  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.788858ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.866478  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.690357ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:54.966649  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.959806ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.066678  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.931702ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.166454  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.763626ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.266297  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.504947ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.360033  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:55.360679  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:55.361914  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:55.362006  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:55.362061  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:55.366416  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.838038ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.466506  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.639281ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.566505  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.648526ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.666376  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.744696ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.766355  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.740817ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.866474  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.810567ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:55.966169  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.5405ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.069622  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.708213ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.166266  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.646796ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.266227  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.605791ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.360233  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:56.360849  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:56.362718  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:56.362815  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:56.362911  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:56.365978  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.433973ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.467203  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.565044ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.566387  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.789601ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.666534  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.898389ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.766139  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.525088ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.866398  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.754395ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:56.966480  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.805339ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.066574  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.904649ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.166390  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.765688ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.266481  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.791721ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.360944  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:57.361015  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:57.362929  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:57.363032  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:57.363204  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:57.379917  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (15.3112ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.466313  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.67232ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.566548  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.960357ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.666225  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.656607ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.766184  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.608095ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.866644  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.907434ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:57.966370  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.72912ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.066600  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.964659ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.166202  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.588561ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.266465  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.816044ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.361166  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:58.361225  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:58.363043  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:58.363144  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:58.363330  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:58.366431  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.849681ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.466457  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.759217ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.567459  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.883464ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.666475  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.838709ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.766612  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.865304ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.867689  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (3.06495ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:58.966512  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.870366ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.066211  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.613451ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.166517  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.798588ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.266588  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.938388ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.361349  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:59.361398  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:59.363237  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:59.363333  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:59.363463  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:30:59.366329  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.743102ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.466502  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.935059ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.566224  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.633434ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.666132  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.432823ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.766477  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.801503ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.867311  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.690914ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:30:59.966517  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.816379ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.066657  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.959217ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.166583  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.894691ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.266401  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.8049ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.361558  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:00.361558  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:00.363422  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:00.363483  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:00.363559  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:00.366172  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.444493ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.467955  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.684878ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.565876  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.312156ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.666338  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.678805ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.766156  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.559757ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.867556  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.935649ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:00.966234  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.559804ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.066363  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.71772ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.166390  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.733472ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.266415  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.763068ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.361748  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:01.361807  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:01.363556  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:01.363637  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:01.363661  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:01.366078  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.479641ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.466245  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.635441ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
E0613 01:31:01.524753  109680 event.go:249] Unable to write event: 'Patch http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/events/test-pod.15a79dea7c9775c3: dial tcp 127.0.0.1:41869: connect: connection refused' (may retry after sleeping)
I0613 01:31:01.566680  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.046574ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.666270  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.663975ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.766375  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.768572ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.866627  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.934993ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:01.966210  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.573549ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.066431  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.704627ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.153032  109680 wrap.go:47] GET /api/v1/namespaces/default: (4.58503ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.155178  109680 wrap.go:47] GET /api/v1/namespaces/default/services/kubernetes: (1.738858ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.157424  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (1.892711ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.166643  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.548954ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.266450  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.788156ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.361946  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:02.362027  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:02.363733  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:02.363737  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:02.363807  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:02.366053  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.554563ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.466390  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.74224ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.566795  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.729134ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.666383  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.679643ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.766245  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.646694ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.868988  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.598784ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:02.968087  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.812203ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.066047  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.428855ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.166363  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.716861ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.266136  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.517948ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.363667  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:03.363703  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:03.363939  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:03.364076  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:03.364639  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:03.366182  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.619617ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.466636  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.817994ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.566363  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.718654ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.666714  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.027866ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.766284  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.652631ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.866500  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.852468ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:03.966237  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.626276ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.066610  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.88765ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.166123  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.529529ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.266026  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.453925ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.363806  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:04.363846  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:04.364032  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:04.364206  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:04.365587  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:04.366068  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.520157ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.467841  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.576151ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.566459  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.784971ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.666302  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.575594ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.766463  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.872063ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.866503  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.808429ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:04.966321  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.691284ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.066431  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.759651ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.166280  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.652219ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.266708  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.051371ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.363949  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:05.363949  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:05.364196  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:05.364273  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:05.365798  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:05.366185  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.60811ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.466144  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.510037ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.566040  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.433961ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.666415  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.780721ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.766516  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.919824ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.866339  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.706636ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:05.966300  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.622245ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.066197  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.509539ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.166293  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.624561ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.266021  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.416987ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.364099  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:06.364146  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:06.364359  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:06.364443  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:06.365943  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:06.367251  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.696004ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.466227  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.543354ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.582780  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (14.560613ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.665944  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.3577ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.766535  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.95113ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.866745  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.958093ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:06.966233  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.598509ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.066755  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.515453ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.166318  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.668337ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.266106  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.456912ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.364194  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:07.364289  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:07.364509  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:07.364571  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:07.366071  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.455311ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.366337  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:07.466434  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.88731ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.579036  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (14.518617ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.666146  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.573797ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.766335  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.70877ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.874654  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (9.933172ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:07.966400  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.780244ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.066409  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.718481ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.166124  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.521841ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.267032  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.157025ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.364370  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:08.364436  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:08.364726  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:08.364769  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:08.366218  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.668355ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.366522  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:08.472148  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (6.398488ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.570204  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (4.733194ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.666477  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.847405ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.766732  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.196623ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.870576  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (5.944691ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:08.966258  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.67685ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.066297  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.583824ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.166289  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.624875ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.266066  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.457461ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.364895  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:09.365035  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:09.365061  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:09.365078  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:09.366336  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.817177ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.366636  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:09.466429  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.804672ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.566130  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.482047ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.666186  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.629244ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.766130  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.553759ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.865993  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.39369ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:09.966926  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.29639ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.066769  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.770968ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.166073  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.449712ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.266652  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.939862ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.365068  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:10.365171  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:10.365192  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:10.365197  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:10.366224  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.646648ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.372869  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:10.466377  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.756339ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.566234  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.615123ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.666221  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.636433ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.766078  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.48288ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.866895  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.809758ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:10.972697  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (8.069102ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.067403  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.907778ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.166417  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.719787ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.266364  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.766706ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.365228  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:11.365335  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:11.365410  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:11.365426  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:11.366210  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.580035ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.373068  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:11.465826  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.286374ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
E0613 01:31:11.525448  109680 event.go:249] Unable to write event: 'Patch http://127.0.0.1:41869/api/v1/namespaces/permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/events/test-pod.15a79dea7c9775c3: dial tcp 127.0.0.1:41869: connect: connection refused' (may retry after sleeping)
I0613 01:31:11.565900  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.318325ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.666612  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.890618ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.765876  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.287346ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.866659  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.977858ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:11.966179  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.546465ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.067299  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.625541ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.149972  109680 wrap.go:47] GET /api/v1/namespaces/default: (1.2921ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.151440  109680 wrap.go:47] GET /api/v1/namespaces/default/services/kubernetes: (1.086195ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.152654  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (908.196µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.166376  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.690353ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.266456  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.851035ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.365353  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:12.365486  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:12.365506  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:12.366419  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.845665ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.366711  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:12.373253  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:12.467550  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.513057ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.571356  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.88273ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.666076  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.433872ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.766161  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.579231ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.866769  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.088986ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:12.966367  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.698867ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.068583  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.493746ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.166848  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.171019ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.268989  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.66863ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.365938  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:13.365942  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.368196ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.365974  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:13.366055  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:13.366880  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:13.369020  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (2.610432ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.374073  109680 reflector.go:243] k8s.io/client-go/informers/factory.go:133: forcing resync
I0613 01:31:13.398388  109680 wrap.go:47] DELETE /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (28.87988ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.401467  109680 wrap.go:47] GET /api/v1/namespaces/node-pid-pressure373c57ba-7700-4a12-a03b-ad79aede1b96/pods/pidpressure-fake-name: (1.485035ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
E0613 01:31:13.402526  109680 scheduling_queue.go:818] Error while retrieving next pod from scheduling queue: scheduling queue is closed
I0613 01:31:13.402990  109680 wrap.go:47] GET /api/v1/persistentvolumeclaims?resourceVersion=24441&timeout=6m34s&timeoutSeconds=394&watch=true: (31.043553034s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56722]
I0613 01:31:13.403137  109680 wrap.go:47] GET /apis/policy/v1beta1/poddisruptionbudgets?resourceVersion=24441&timeout=6m30s&timeoutSeconds=390&watch=true: (31.043339498s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56726]
I0613 01:31:13.403253  109680 wrap.go:47] GET /api/v1/services?resourceVersion=24564&timeout=8m2s&timeoutSeconds=482&watch=true: (31.043564511s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56724]
I0613 01:31:13.403375  109680 wrap.go:47] GET /apis/apps/v1/replicasets?resourceVersion=24441&timeout=5m28s&timeoutSeconds=328&watch=true: (31.043337668s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56734]
I0613 01:31:13.403492  109680 wrap.go:47] GET /apis/apps/v1/statefulsets?resourceVersion=24441&timeout=9m33s&timeoutSeconds=573&watch=true: (31.043442584s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56730]
I0613 01:31:13.403593  109680 wrap.go:47] GET /api/v1/replicationcontrollers?resourceVersion=24441&timeout=6m48s&timeoutSeconds=408&watch=true: (31.044102619s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56728]
I0613 01:31:13.403696  109680 wrap.go:47] GET /api/v1/persistentvolumes?resourceVersion=24441&timeout=6m52s&timeoutSeconds=412&watch=true: (31.043539741s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56732]
I0613 01:31:13.403796  109680 wrap.go:47] GET /api/v1/nodes?resourceVersion=24441&timeout=8m28s&timeoutSeconds=508&watch=true: (31.045142801s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56542]
I0613 01:31:13.403924  109680 wrap.go:47] GET /api/v1/pods?fieldSelector=status.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&resourceVersion=24441&timeoutSeconds=524&watch=true: (31.168313331s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56504]
I0613 01:31:13.404885  109680 wrap.go:47] GET /apis/storage.k8s.io/v1/storageclasses?resourceVersion=24441&timeout=9m51s&timeoutSeconds=591&watch=true: (31.047737523s) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56738]
I0613 01:31:13.414792  109680 wrap.go:47] DELETE /api/v1/nodes: (10.762108ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.415046  109680 controller.go:176] Shutting down kubernetes service endpoint reconciler
I0613 01:31:13.426000  109680 wrap.go:47] GET /api/v1/namespaces/default/endpoints/kubernetes: (10.520871ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
I0613 01:31:13.431515  109680 wrap.go:47] PUT /api/v1/namespaces/default/endpoints/kubernetes: (4.932724ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:56736]
predicates_test.go:918: Test Failed: error, timed out waiting for the condition, while waiting for scheduled
				from junit_d431ed5f68ae4ddf888439fb96b687a923412204_20190613-012418.xml
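Note: the failure above is a wait timeout rather than an explicit scheduler error: the integration test polls for the pod to reach a scheduled state and gives up when the poll deadline passes. As an illustration only (a minimal sketch, not the actual predicates_test.go code; the helper names, durations, and the getPod placeholder are assumptions), the pattern that produces the "timed out waiting for the condition" message with k8s.io/apimachinery's wait package looks roughly like this:

// Sketch only: poll until a pod reports PodScheduled=True or the timeout expires.
// Helper names here are hypothetical; wait.Poll and the core/v1 types are real APIs.
package main

import (
	"fmt"
	"time"

	v1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/wait"
)

// podScheduled reports whether the pod carries a PodScheduled condition set to True.
func podScheduled(pod *v1.Pod) bool {
	for _, c := range pod.Status.Conditions {
		if c.Type == v1.PodScheduled && c.Status == v1.ConditionTrue {
			return true
		}
	}
	return false
}

// waitForPodScheduled polls getPod until the pod is scheduled or the timeout expires.
// getPod stands in for however the test fetches the pod (e.g. a client-go GET).
func waitForPodScheduled(getPod func() (*v1.Pod, error), timeout time.Duration) error {
	return wait.Poll(100*time.Millisecond, timeout, func() (bool, error) {
		pod, err := getPod()
		if err != nil {
			return false, err
		}
		return podScheduled(pod), nil
	})
}

func main() {
	// A pod that never gains the PodScheduled condition makes the poll return
	// wait.ErrWaitTimeout, whose text is "timed out waiting for the condition".
	err := waitForPodScheduled(func() (*v1.Pod, error) { return &v1.Pod{}, nil }, 2*time.Second)
	fmt.Println(err)
}

In this run, that kind of poll expired because pidpressure-fake-name never reported being scheduled within the deadline, which is what the GET requests repeating above were checking.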

Find permit-plugin7714e4ce-22b1-4613-8a0d-145461140b81/test-pod mentions in log files | View test history on testgrid


Show 1692 Passed Tests

Show 4 Skipped Tests

Error lines from build-log.txt

... skipping 319 lines ...
W0613 01:18:25.674] I0613 01:18:25.673991   48662 serving.go:312] Generated self-signed cert (/tmp/apiserver.crt, /tmp/apiserver.key)
W0613 01:18:25.675] I0613 01:18:25.674064   48662 server.go:560] external host was not specified, using 172.17.0.2
W0613 01:18:25.675] W0613 01:18:25.674074   48662 authentication.go:415] AnonymousAuth is not allowed with the AlwaysAllow authorizer. Resetting AnonymousAuth to false. You should use a different authorizer
W0613 01:18:25.675] I0613 01:18:25.674524   48662 server.go:147] Version: v1.16.0-alpha.0.934+926cf4a58d8a98
W0613 01:18:26.205] I0613 01:18:26.205128   48662 plugins.go:158] Loaded 4 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority.
W0613 01:18:26.206] I0613 01:18:26.205155   48662 plugins.go:161] Loaded 4 validating admission controller(s) successfully in the following order: LimitRanger,Priority,PersistentVolumeClaimResize,ResourceQuota.
W0613 01:18:26.207] E0613 01:18:26.205882   48662 prometheus.go:55] failed to register depth metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.207] E0613 01:18:26.205930   48662 prometheus.go:68] failed to register adds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.207] E0613 01:18:26.205955   48662 prometheus.go:82] failed to register latency metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.207] E0613 01:18:26.205978   48662 prometheus.go:96] failed to register workDuration metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.208] E0613 01:18:26.206003   48662 prometheus.go:112] failed to register unfinished metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.208] E0613 01:18:26.206025   48662 prometheus.go:126] failed to register unfinished metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.208] E0613 01:18:26.206042   48662 prometheus.go:152] failed to register depth metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.209] E0613 01:18:26.206059   48662 prometheus.go:164] failed to register adds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.209] E0613 01:18:26.206158   48662 prometheus.go:176] failed to register latency metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.209] E0613 01:18:26.206223   48662 prometheus.go:188] failed to register work_duration metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.209] E0613 01:18:26.206251   48662 prometheus.go:203] failed to register unfinished_work_seconds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.210] E0613 01:18:26.206277   48662 prometheus.go:216] failed to register longest_running_processor_microseconds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:26.210] I0613 01:18:26.206310   48662 plugins.go:158] Loaded 4 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority.
W0613 01:18:26.210] I0613 01:18:26.206322   48662 plugins.go:161] Loaded 4 validating admission controller(s) successfully in the following order: LimitRanger,Priority,PersistentVolumeClaimResize,ResourceQuota.
W0613 01:18:26.211] I0613 01:18:26.210799   48662 client.go:354] parsed scheme: ""
W0613 01:18:26.211] I0613 01:18:26.210906   48662 client.go:354] scheme "" not registered, fallback to default scheme
W0613 01:18:26.211] I0613 01:18:26.210967   48662 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
W0613 01:18:26.211] I0613 01:18:26.211077   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
... skipping 361 lines ...
W0613 01:18:26.847] W0613 01:18:26.846620   48662 genericapiserver.go:351] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
W0613 01:18:27.204] I0613 01:18:27.203619   48662 client.go:354] parsed scheme: ""
W0613 01:18:27.204] I0613 01:18:27.203978   48662 client.go:354] scheme "" not registered, fallback to default scheme
W0613 01:18:27.204] I0613 01:18:27.204081   48662 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
W0613 01:18:27.205] I0613 01:18:27.204129   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W0613 01:18:27.205] I0613 01:18:27.205189   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W0613 01:18:27.875] E0613 01:18:27.874997   48662 prometheus.go:55] failed to register depth metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.876] E0613 01:18:27.875077   48662 prometheus.go:68] failed to register adds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.876] E0613 01:18:27.875163   48662 prometheus.go:82] failed to register latency metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.876] E0613 01:18:27.875208   48662 prometheus.go:96] failed to register workDuration metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.876] E0613 01:18:27.875251   48662 prometheus.go:112] failed to register unfinished metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.877] E0613 01:18:27.875283   48662 prometheus.go:126] failed to register unfinished metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.877] E0613 01:18:27.875302   48662 prometheus.go:152] failed to register depth metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.877] E0613 01:18:27.875320   48662 prometheus.go:164] failed to register adds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.878] E0613 01:18:27.875405   48662 prometheus.go:176] failed to register latency metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.878] E0613 01:18:27.875464   48662 prometheus.go:188] failed to register work_duration metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.878] E0613 01:18:27.875503   48662 prometheus.go:203] failed to register unfinished_work_seconds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.878] E0613 01:18:27.875529   48662 prometheus.go:216] failed to register longest_running_processor_microseconds metric admission_quota_controller: duplicate metrics collector registration attempted
W0613 01:18:27.878] I0613 01:18:27.875573   48662 plugins.go:158] Loaded 4 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority.
W0613 01:18:27.879] I0613 01:18:27.875579   48662 plugins.go:161] Loaded 4 validating admission controller(s) successfully in the following order: LimitRanger,Priority,PersistentVolumeClaimResize,ResourceQuota.
W0613 01:18:27.879] I0613 01:18:27.876945   48662 client.go:354] parsed scheme: ""
W0613 01:18:27.879] I0613 01:18:27.876960   48662 client.go:354] scheme "" not registered, fallback to default scheme
W0613 01:18:27.879] I0613 01:18:27.876995   48662 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
W0613 01:18:27.879] I0613 01:18:27.877037   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
... skipping 107 lines ...
W0613 01:19:04.875] I0613 01:19:04.874419   52013 controller_utils.go:1029] Waiting for caches to sync for disruption controller
W0613 01:19:04.875] I0613 01:19:04.874421   52013 controller_utils.go:1029] Waiting for caches to sync for TTL controller
W0613 01:19:04.875] I0613 01:19:04.873316   52013 deployment_controller.go:152] Starting deployment controller
W0613 01:19:04.875] I0613 01:19:04.874475   52013 controller_utils.go:1029] Waiting for caches to sync for deployment controller
W0613 01:19:04.876] I0613 01:19:04.873330   52013 stateful_set.go:145] Starting stateful set controller
W0613 01:19:04.876] I0613 01:19:04.874545   52013 controller_utils.go:1029] Waiting for caches to sync for stateful set controller
W0613 01:19:04.876] E0613 01:19:04.874836   52013 core.go:76] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W0613 01:19:04.876] W0613 01:19:04.874947   52013 controllermanager.go:524] Skipping "service"
W0613 01:19:04.876] W0613 01:19:04.874970   52013 controllermanager.go:524] Skipping "ttl-after-finished"
I0613 01:19:04.977] node/127.0.0.1 created
I0613 01:19:04.977] +++ [0613 01:19:04] Checking kubectl version
I0613 01:19:04.978] Client Version: version.Info{Major:"1", Minor:"16+", GitVersion:"v1.16.0-alpha.0.934+926cf4a58d8a98", GitCommit:"926cf4a58d8a98556a0e6ef15cf948ed00f60773", GitTreeState:"clean", BuildDate:"2019-06-13T01:17:07Z", GoVersion:"go1.12.1", Compiler:"gc", Platform:"linux/amd64"}
I0613 01:19:04.978] Server Version: version.Info{Major:"1", Minor:"16+", GitVersion:"v1.16.0-alpha.0.934+926cf4a58d8a98", GitCommit:"926cf4a58d8a98556a0e6ef15cf948ed00f60773", GitTreeState:"clean", BuildDate:"2019-06-13T01:17:28Z", GoVersion:"go1.12.1", Compiler:"gc", Platform:"linux/amd64"}
... skipping 28 lines ...
W0613 01:19:05.084] I0613 01:19:05.081629   52013 controllermanager.go:532] Started "serviceaccount"
W0613 01:19:05.085] I0613 01:19:05.082195   52013 controllermanager.go:532] Started "cronjob"
W0613 01:19:05.085] I0613 01:19:05.081759   52013 serviceaccounts_controller.go:117] Starting service account controller
W0613 01:19:05.085] I0613 01:19:05.082417   52013 controller_utils.go:1029] Waiting for caches to sync for service account controller
W0613 01:19:05.085] I0613 01:19:05.082262   52013 cronjob_controller.go:96] Starting CronJob Manager
W0613 01:19:05.085] I0613 01:19:05.083166   52013 node_lifecycle_controller.go:77] Sending events to api server
W0613 01:19:05.086] E0613 01:19:05.083632   52013 core.go:160] failed to start cloud node lifecycle controller: no cloud provider provided
W0613 01:19:05.086] W0613 01:19:05.083704   52013 controllermanager.go:524] Skipping "cloud-node-lifecycle"
W0613 01:19:05.086] I0613 01:19:05.083976   52013 controllermanager.go:532] Started "csrcleaner"
W0613 01:19:05.087] W0613 01:19:05.083991   52013 controllermanager.go:511] "bootstrapsigner" is disabled
W0613 01:19:05.087] W0613 01:19:05.083995   52013 controllermanager.go:511] "tokencleaner" is disabled
W0613 01:19:05.087] I0613 01:19:05.084040   52013 cleaner.go:81] Starting CSR cleaner controller
W0613 01:19:05.087] I0613 01:19:05.084923   52013 controllermanager.go:532] Started "persistentvolume-expander"
... skipping 33 lines ...
W0613 01:19:05.771] I0613 01:19:05.497472   52013 controllermanager.go:532] Started "horizontalpodautoscaling"
W0613 01:19:05.772] I0613 01:19:05.497644   52013 horizontal.go:156] Starting HPA controller
W0613 01:19:05.772] I0613 01:19:05.497672   52013 controller_utils.go:1029] Waiting for caches to sync for HPA controller
W0613 01:19:05.772] I0613 01:19:05.497917   52013 controllermanager.go:532] Started "pv-protection"
W0613 01:19:05.773] I0613 01:19:05.498267   52013 pv_protection_controller.go:82] Starting PV protection controller
W0613 01:19:05.773] I0613 01:19:05.499985   52013 controller_utils.go:1029] Waiting for caches to sync for PV protection controller
W0613 01:19:05.773] W0613 01:19:05.503798   52013 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
W0613 01:19:05.773] I0613 01:19:05.544391   52013 controller_utils.go:1036] Caches are synced for certificate controller
W0613 01:19:05.774] I0613 01:19:05.559074   52013 controller_utils.go:1036] Caches are synced for daemon sets controller
W0613 01:19:05.774] I0613 01:19:05.559636   52013 controller_utils.go:1036] Caches are synced for ReplicaSet controller
W0613 01:19:05.774] I0613 01:19:05.566408   52013 controller_utils.go:1036] Caches are synced for taint controller
W0613 01:19:05.774] I0613 01:19:05.566824   52013 node_lifecycle_controller.go:1159] Initializing eviction metric for zone: 
W0613 01:19:05.775] I0613 01:19:05.567010   52013 node_lifecycle_controller.go:1009] Controller detected that all Nodes are not-Ready. Entering master disruption mode.
... skipping 2 lines ...
W0613 01:19:05.776] I0613 01:19:05.570244   52013 controller_utils.go:1036] Caches are synced for PVC protection controller
W0613 01:19:05.776] I0613 01:19:05.570642   52013 controller_utils.go:1036] Caches are synced for ClusterRoleAggregator controller
W0613 01:19:05.776] I0613 01:19:05.570835   52013 controller_utils.go:1036] Caches are synced for GC controller
W0613 01:19:05.776] I0613 01:19:05.575907   52013 controller_utils.go:1036] Caches are synced for TTL controller
W0613 01:19:05.777] I0613 01:19:05.575907   52013 controller_utils.go:1036] Caches are synced for deployment controller
W0613 01:19:05.777] I0613 01:19:05.586339   52013 controller_utils.go:1036] Caches are synced for ReplicationController controller
W0613 01:19:05.777] E0613 01:19:05.591481   52013 clusterroleaggregation_controller.go:180] admin failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "admin": the object has been modified; please apply your changes to the latest version and try again
W0613 01:19:05.778] E0613 01:19:05.591709   52013 clusterroleaggregation_controller.go:180] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
W0613 01:19:05.778] I0613 01:19:05.596782   52013 controller_utils.go:1036] Caches are synced for job controller
W0613 01:19:05.778] I0613 01:19:05.597966   52013 controller_utils.go:1036] Caches are synced for HPA controller
W0613 01:19:05.778] I0613 01:19:05.757690   52013 controller_utils.go:1036] Caches are synced for endpoint controller
W0613 01:19:05.875] I0613 01:19:05.874791   52013 controller_utils.go:1036] Caches are synced for stateful set controller
W0613 01:19:05.948] I0613 01:19:05.947635   52013 controller_utils.go:1036] Caches are synced for attach detach controller
W0613 01:19:05.975] I0613 01:19:05.974739   52013 controller_utils.go:1036] Caches are synced for disruption controller
... skipping 71 lines ...
I0613 01:19:09.121] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:19:09.123] +++ command: run_RESTMapper_evaluation_tests
I0613 01:19:09.135] +++ [0613 01:19:09] Creating namespace namespace-1560388749-14889
I0613 01:19:09.212] namespace/namespace-1560388749-14889 created
I0613 01:19:09.287] Context "test" modified.
I0613 01:19:09.294] +++ [0613 01:19:09] Testing RESTMapper
I0613 01:19:09.404] +++ [0613 01:19:09] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
I0613 01:19:09.419] +++ exit code: 0
I0613 01:19:09.533] NAME                              SHORTNAMES   APIGROUP                       NAMESPACED   KIND
I0613 01:19:09.533] bindings                                                                      true         Binding
I0613 01:19:09.533] componentstatuses                 cs                                          false        ComponentStatus
I0613 01:19:09.534] configmaps                        cm                                          true         ConfigMap
I0613 01:19:09.534] endpoints                         ep                                          true         Endpoints
... skipping 661 lines ...
I0613 01:19:29.310] poddisruptionbudget.policy/test-pdb-3 created
I0613 01:19:29.402] core.sh:251: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
I0613 01:19:29.483] poddisruptionbudget.policy/test-pdb-4 created
I0613 01:19:29.575] core.sh:255: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
I0613 01:19:29.746] core.sh:261: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:19:29.952] pod/env-test-pod created
W0613 01:19:30.053] error: resource(s) were provided, but no name, label selector, or --all flag specified
W0613 01:19:30.053] error: setting 'all' parameter but found a non empty selector. 
W0613 01:19:30.053] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0613 01:19:30.053] I0613 01:19:28.979195   48662 controller.go:606] quota admission added evaluator for: poddisruptionbudgets.policy
W0613 01:19:30.054] error: min-available and max-unavailable cannot be both specified
I0613 01:19:30.156] core.sh:264: Successful describe pods --namespace=test-kubectl-describe-pod env-test-pod:
I0613 01:19:30.156] Name:         env-test-pod
I0613 01:19:30.156] Namespace:    test-kubectl-describe-pod
I0613 01:19:30.156] Priority:     0
I0613 01:19:30.156] Node:         <none>
I0613 01:19:30.156] Labels:       <none>
... skipping 142 lines ...
I0613 01:19:42.255] service "modified" deleted
I0613 01:19:42.344] replicationcontroller "modified" deleted
I0613 01:19:42.624] core.sh:434: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:19:42.795] pod/valid-pod created
I0613 01:19:42.899] core.sh:438: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0613 01:19:43.057] Successful
I0613 01:19:43.057] message:Error from server: cannot restore map from string
I0613 01:19:43.057] has:cannot restore map from string
I0613 01:19:43.146] Successful
I0613 01:19:43.147] message:pod/valid-pod patched (no change)
I0613 01:19:43.147] has:patched (no change)
I0613 01:19:43.235] pod/valid-pod patched
I0613 01:19:43.332] core.sh:455: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
... skipping 5 lines ...
I0613 01:19:43.850] pod/valid-pod patched
I0613 01:19:43.944] core.sh:470: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: changed-with-yaml:
I0613 01:19:44.021] pod/valid-pod patched
I0613 01:19:44.111] core.sh:475: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.1:
I0613 01:19:44.271] pod/valid-pod patched
I0613 01:19:44.363] core.sh:491: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0613 01:19:44.535] +++ [0613 01:19:44] "kubectl patch with resourceVersion 497" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
W0613 01:19:44.636] E0613 01:19:43.049022   48662 status.go:71] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"cannot restore map from string"}
I0613 01:19:44.794] pod "valid-pod" deleted
I0613 01:19:44.806] pod/valid-pod replaced
I0613 01:19:44.904] core.sh:515: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
I0613 01:19:45.081] Successful
I0613 01:19:45.081] message:error: --grace-period must have --force specified
I0613 01:19:45.082] has:\-\-grace-period must have \-\-force specified
I0613 01:19:45.255] Successful
I0613 01:19:45.255] message:error: --timeout must have --force specified
I0613 01:19:45.255] has:\-\-timeout must have \-\-force specified
I0613 01:19:45.420] node/node-v1-test created
W0613 01:19:45.522] W0613 01:19:45.420450   52013 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
W0613 01:19:45.571] I0613 01:19:45.570384   52013 event.go:258] Event(v1.ObjectReference{Kind:"Node", Namespace:"", Name:"node-v1-test", UID:"d6fe369a-a870-413f-831b-ad6e679181f8", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RegisteredNode' Node node-v1-test event: Registered Node node-v1-test in Controller
I0613 01:19:45.672] node/node-v1-test replaced
I0613 01:19:45.692] core.sh:552: Successful get node node-v1-test {{.metadata.annotations.a}}: b
I0613 01:19:45.768] node "node-v1-test" deleted
I0613 01:19:45.868] core.sh:559: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0613 01:19:46.148] core.sh:562: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/serve_hostname:
... skipping 58 lines ...
I0613 01:19:51.469] save-config.sh:31: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:19:51.633] pod/test-pod created
W0613 01:19:51.733] Edit cancelled, no changes made.
W0613 01:19:51.734] Edit cancelled, no changes made.
W0613 01:19:51.734] Edit cancelled, no changes made.
W0613 01:19:51.734] Edit cancelled, no changes made.
W0613 01:19:51.734] error: 'name' already has a value (valid-pod), and --overwrite is false
W0613 01:19:51.734] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0613 01:19:51.734] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
W0613 01:19:51.735] I0613 01:19:50.571153   52013 event.go:258] Event(v1.ObjectReference{Kind:"Node", Namespace:"", Name:"node-v1-test", UID:"d6fe369a-a870-413f-831b-ad6e679181f8", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RemovingNode' Node node-v1-test event: Removing Node node-v1-test from Controller
I0613 01:19:51.835] pod "test-pod" deleted
I0613 01:19:51.835] +++ [0613 01:19:51] Creating namespace namespace-1560388791-21122
I0613 01:19:51.876] namespace/namespace-1560388791-21122 created
... skipping 42 lines ...
I0613 01:19:55.166] +++ Running case: test-cmd.run_kubectl_create_error_tests 
I0613 01:19:55.168] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:19:55.171] +++ command: run_kubectl_create_error_tests
I0613 01:19:55.184] +++ [0613 01:19:55] Creating namespace namespace-1560388795-11065
I0613 01:19:55.254] namespace/namespace-1560388795-11065 created
I0613 01:19:55.324] Context "test" modified.
I0613 01:19:55.330] +++ [0613 01:19:55] Testing kubectl create with error
W0613 01:19:55.431] Error: must specify one of -f and -k
W0613 01:19:55.432] 
W0613 01:19:55.432] Create a resource from a file or from stdin.
W0613 01:19:55.432] 
W0613 01:19:55.432]  JSON and YAML formats are accepted.
W0613 01:19:55.433] 
W0613 01:19:55.433] Examples:
... skipping 41 lines ...
W0613 01:19:55.445] 
W0613 01:19:55.445] Usage:
W0613 01:19:55.445]   kubectl create -f FILENAME [options]
W0613 01:19:55.445] 
W0613 01:19:55.446] Use "kubectl <command> --help" for more information about a given command.
W0613 01:19:55.446] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0613 01:19:55.574] +++ [0613 01:19:55] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
W0613 01:19:55.674] kubectl convert is DEPRECATED and will be removed in a future version.
W0613 01:19:55.675] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I0613 01:19:55.776] +++ exit code: 0
I0613 01:19:55.798] Recording: run_kubectl_apply_tests
I0613 01:19:55.799] Running command: run_kubectl_apply_tests
I0613 01:19:55.820] 
... skipping 19 lines ...
W0613 01:19:57.994] I0613 01:19:57.993592   48662 client.go:354] parsed scheme: ""
W0613 01:19:57.994] I0613 01:19:57.993647   48662 client.go:354] scheme "" not registered, fallback to default scheme
W0613 01:19:57.994] I0613 01:19:57.993699   48662 asm_amd64.s:1337] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0  <nil>}]
W0613 01:19:57.994] I0613 01:19:57.993756   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W0613 01:19:57.995] I0613 01:19:57.994301   48662 asm_amd64.s:1337] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W0613 01:19:57.997] I0613 01:19:57.997150   48662 controller.go:606] quota admission added evaluator for: resources.mygroup.example.com
W0613 01:19:58.090] Error from server (NotFound): resources.mygroup.example.com "myobj" not found
I0613 01:19:58.191] kind.mygroup.example.com/myobj serverside-applied (server dry run)
I0613 01:19:58.191] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I0613 01:19:58.205] +++ exit code: 0
I0613 01:19:58.240] Recording: run_kubectl_run_tests
I0613 01:19:58.241] Running command: run_kubectl_run_tests
I0613 01:19:58.261] 
... skipping 94 lines ...
I0613 01:20:00.685] Context "test" modified.
I0613 01:20:00.694] +++ [0613 01:20:00] Testing kubectl create filter
I0613 01:20:00.785] create.sh:30: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:00.957] pod/selector-test-pod created
I0613 01:20:01.062] create.sh:34: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I0613 01:20:01.147] Successful
I0613 01:20:01.147] message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I0613 01:20:01.147] has:pods "selector-test-pod-dont-apply" not found
I0613 01:20:01.227] pod "selector-test-pod" deleted
I0613 01:20:01.248] +++ exit code: 0
I0613 01:20:01.280] Recording: run_kubectl_apply_deployments_tests
I0613 01:20:01.281] Running command: run_kubectl_apply_deployments_tests
I0613 01:20:01.302] 
... skipping 36 lines ...
I0613 01:20:03.395] apps.sh:142: Successful get replicasets {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:03.480] apps.sh:143: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:03.565] apps.sh:147: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:03.736] deployment.extensions/nginx created
I0613 01:20:03.841] apps.sh:151: Successful get deployment nginx {{.metadata.name}}: nginx
I0613 01:20:08.133] Successful
I0613 01:20:08.133] message:Error from server (Conflict): error when applying patch:
I0613 01:20:08.134] {"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1560388801-21636\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
I0613 01:20:08.134] to:
I0613 01:20:08.134] Resource: "extensions/v1beta1, Resource=deployments", GroupVersionKind: "extensions/v1beta1, Kind=Deployment"
I0613 01:20:08.135] Name: "nginx", Namespace: "namespace-1560388801-21636"
I0613 01:20:08.137] Object: &{map["apiVersion":"extensions/v1beta1" "kind":"Deployment" "metadata":map["annotations":map["deployment.kubernetes.io/revision":"1" "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1560388801-21636\"},\"spec\":{\"replicas\":3,\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx1\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"] "creationTimestamp":"2019-06-13T01:20:03Z" "generation":'\x01' "labels":map["name":"nginx"] "managedFields":[map["apiVersion":"apps/v1" "fields":map["f:metadata":map["f:annotations":map["f:deployment.kubernetes.io/revision":map[]]] "f:status":map["f:conditions":map[".":map[] "k:{\"type\":\"Available\"}":map[".":map[] "f:lastTransitionTime":map[] "f:lastUpdateTime":map[] "f:message":map[] "f:reason":map[] "f:status":map[] "f:type":map[]]] "f:observedGeneration":map[] "f:replicas":map[] "f:unavailableReplicas":map[] "f:updatedReplicas":map[]]] "manager":"kube-controller-manager" "operation":"Update" "time":"2019-06-13T01:20:03Z"] map["apiVersion":"extensions/v1beta1" "fields":map["f:metadata":map["f:annotations":map[".":map[] "f:kubectl.kubernetes.io/last-applied-configuration":map[]] "f:labels":map[".":map[] "f:name":map[]]] "f:spec":map["f:progressDeadlineSeconds":map[] "f:replicas":map[] "f:revisionHistoryLimit":map[] "f:selector":map[".":map[] "f:matchLabels":map[".":map[] "f:name":map[]]] "f:strategy":map["f:rollingUpdate":map[".":map[] "f:maxSurge":map[] "f:maxUnavailable":map[]] "f:type":map[]] "f:template":map["f:metadata":map["f:labels":map[".":map[] "f:name":map[]]] "f:spec":map["f:containers":map["k:{\"name\":\"nginx\"}":map[".":map[] "f:image":map[] "f:imagePullPolicy":map[] "f:name":map[] "f:ports":map[".":map[] "k:{\"containerPort\":80,\"protocol\":\"TCP\"}":map[".":map[] "f:containerPort":map[] "f:protocol":map[]]] "f:resources":map[] "f:terminationMessagePath":map[] "f:terminationMessagePolicy":map[]]] "f:dnsPolicy":map[] "f:restartPolicy":map[] "f:schedulerName":map[] "f:securityContext":map[] "f:terminationGracePeriodSeconds":map[]]]]] "manager":"kubectl" "operation":"Update" "time":"2019-06-13T01:20:03Z"]] "name":"nginx" "namespace":"namespace-1560388801-21636" "resourceVersion":"607" "selfLink":"/apis/extensions/v1beta1/namespaces/namespace-1560388801-21636/deployments/nginx" "uid":"f41e8071-98fc-4089-838b-106adbdc84a5"] "spec":map["progressDeadlineSeconds":%!q(int64=+2147483647) "replicas":'\x03' "revisionHistoryLimit":%!q(int64=+2147483647) "selector":map["matchLabels":map["name":"nginx1"]] "strategy":map["rollingUpdate":map["maxSurge":'\x01' "maxUnavailable":'\x01'] "type":"RollingUpdate"] "template":map["metadata":map["creationTimestamp":<nil> "labels":map["name":"nginx1"]] "spec":map["containers":[map["image":"k8s.gcr.io/nginx:test-cmd" "imagePullPolicy":"IfNotPresent" "name":"nginx" "ports":[map["containerPort":'P' "protocol":"TCP"]] "resources":map[] "terminationMessagePath":"/dev/termination-log" "terminationMessagePolicy":"File"]] "dnsPolicy":"ClusterFirst" "restartPolicy":"Always" "schedulerName":"default-scheduler" "securityContext":map[] "terminationGracePeriodSeconds":'\x1e']]] "status":map["conditions":[map["lastTransitionTime":"2019-06-13T01:20:03Z" "lastUpdateTime":"2019-06-13T01:20:03Z" "message":"Deployment does not have minimum 
availability." "reason":"MinimumReplicasUnavailable" "status":"False" "type":"Available"]] "observedGeneration":'\x01' "replicas":'\x03' "unavailableReplicas":'\x03' "updatedReplicas":'\x03']]}
I0613 01:20:08.138] for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.extensions "nginx": the object has been modified; please apply your changes to the latest version and try again
I0613 01:20:08.138] has:Error from server (Conflict)
W0613 01:20:08.239] I0613 01:20:03.741478   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1560388801-21636", Name:"nginx", UID:"f41e8071-98fc-4089-838b-106adbdc84a5", APIVersion:"apps/v1", ResourceVersion:"594", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-54d5cbd75f to 3
W0613 01:20:08.239] I0613 01:20:03.745743   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388801-21636", Name:"nginx-54d5cbd75f", UID:"7381ce7a-124f-48af-bdcd-ae44a304eb47", APIVersion:"apps/v1", ResourceVersion:"595", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-54d5cbd75f-kj7nd
W0613 01:20:08.240] I0613 01:20:03.749237   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388801-21636", Name:"nginx-54d5cbd75f", UID:"7381ce7a-124f-48af-bdcd-ae44a304eb47", APIVersion:"apps/v1", ResourceVersion:"595", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-54d5cbd75f-b5dw4
W0613 01:20:08.240] I0613 01:20:03.750028   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388801-21636", Name:"nginx-54d5cbd75f", UID:"7381ce7a-124f-48af-bdcd-ae44a304eb47", APIVersion:"apps/v1", ResourceVersion:"595", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-54d5cbd75f-kgl4n
W0613 01:20:09.567] I0613 01:20:09.567307   52013 horizontal.go:340] Horizontal Pod Autoscaler frontend has been deleted in namespace-1560388792-29249
I0613 01:20:13.421] deployment.extensions/nginx configured
... skipping 197 lines ...
I0613 01:20:21.032] +++ [0613 01:20:21] Creating namespace namespace-1560388821-27322
I0613 01:20:21.110] namespace/namespace-1560388821-27322 created
I0613 01:20:21.204] Context "test" modified.
I0613 01:20:21.211] +++ [0613 01:20:21] Testing kubectl get
I0613 01:20:21.296] get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:21.387] Successful
I0613 01:20:21.388] message:Error from server (NotFound): pods "abc" not found
I0613 01:20:21.388] has:pods "abc" not found
I0613 01:20:21.477] get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:21.577] Successful
I0613 01:20:21.578] message:Error from server (NotFound): pods "abc" not found
I0613 01:20:21.578] has:pods "abc" not found
I0613 01:20:21.682] get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:21.781] Successful
I0613 01:20:21.781] message:{
I0613 01:20:21.782]     "apiVersion": "v1",
I0613 01:20:21.782]     "items": [],
... skipping 23 lines ...
I0613 01:20:22.166] has not:No resources found
I0613 01:20:22.250] Successful
I0613 01:20:22.250] message:NAME
I0613 01:20:22.250] has not:No resources found
I0613 01:20:22.351] get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:22.448] Successful
I0613 01:20:22.449] message:error: the server doesn't have a resource type "foobar"
I0613 01:20:22.449] has not:No resources found
I0613 01:20:22.550] Successful
I0613 01:20:22.551] message:No resources found.
I0613 01:20:22.551] has:No resources found
I0613 01:20:22.638] Successful
I0613 01:20:22.639] message:
I0613 01:20:22.639] has not:No resources found
I0613 01:20:22.731] Successful
I0613 01:20:22.731] message:No resources found.
I0613 01:20:22.731] has:No resources found
I0613 01:20:22.823] get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:22.913] Successful
I0613 01:20:22.913] message:Error from server (NotFound): pods "abc" not found
I0613 01:20:22.913] has:pods "abc" not found
I0613 01:20:22.915] FAIL!
I0613 01:20:22.915] message:Error from server (NotFound): pods "abc" not found
I0613 01:20:22.916] has not:List
I0613 01:20:22.916] 99 /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/get.sh
I0613 01:20:23.030] Successful
I0613 01:20:23.031] message:I0613 01:20:22.976573   62653 loader.go:359] Config loaded from file:  /tmp/tmp.A7KUUYjBfl/.kube/config
I0613 01:20:23.031] I0613 01:20:22.978288   62653 round_trippers.go:438] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
I0613 01:20:23.031] I0613 01:20:23.002745   62653 round_trippers.go:438] GET http://127.0.0.1:8080/api/v1/namespaces/default/pods 200 OK in 2 milliseconds
... skipping 888 lines ...
I0613 01:20:28.641] Successful
I0613 01:20:28.641] message:NAME    DATA   AGE
I0613 01:20:28.642] one     0      0s
I0613 01:20:28.642] three   0      0s
I0613 01:20:28.642] two     0      0s
I0613 01:20:28.642] STATUS    REASON          MESSAGE
I0613 01:20:28.642] Failure   InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I0613 01:20:28.642] has not:watch is only supported on individual resources
I0613 01:20:29.735] Successful
I0613 01:20:29.735] message:STATUS    REASON          MESSAGE
I0613 01:20:29.736] Failure   InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I0613 01:20:29.736] has not:watch is only supported on individual resources
I0613 01:20:29.740] +++ [0613 01:20:29] Creating namespace namespace-1560388829-5703
I0613 01:20:29.809] namespace/namespace-1560388829-5703 created
I0613 01:20:29.876] Context "test" modified.
I0613 01:20:29.968] get.sh:157: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:30.135] pod/valid-pod created
... skipping 104 lines ...
I0613 01:20:30.229] }
I0613 01:20:30.317] get.sh:162: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0613 01:20:30.549] <no value>Successful
I0613 01:20:30.549] message:valid-pod:
I0613 01:20:30.549] has:valid-pod:
I0613 01:20:30.626] Successful
I0613 01:20:30.626] message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found. Printing more information for debugging the template:
I0613 01:20:30.626] 	template was:
I0613 01:20:30.626] 		{.missing}
I0613 01:20:30.627] 	object given to jsonpath engine was:
I0613 01:20:30.629] 		map[string]interface {}{"apiVersion":"v1", "kind":"Pod", "metadata":map[string]interface {}{"creationTimestamp":"2019-06-13T01:20:30Z", "labels":map[string]interface {}{"name":"valid-pod"}, "managedFields":[]interface {}{map[string]interface {}{"apiVersion":"v1", "fields":map[string]interface {}{"f:metadata":map[string]interface {}{"f:labels":map[string]interface {}{".":map[string]interface {}{}, "f:name":map[string]interface {}{}}}, "f:spec":map[string]interface {}{"f:containers":map[string]interface {}{"k:{\"name\":\"kubernetes-serve-hostname\"}":map[string]interface {}{".":map[string]interface {}{}, "f:image":map[string]interface {}{}, "f:imagePullPolicy":map[string]interface {}{}, "f:name":map[string]interface {}{}, "f:resources":map[string]interface {}{".":map[string]interface {}{}, "f:limits":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}, "f:requests":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}}, "f:terminationMessagePath":map[string]interface {}{}, "f:terminationMessagePolicy":map[string]interface {}{}}}, "f:dnsPolicy":map[string]interface {}{}, "f:enableServiceLinks":map[string]interface {}{}, "f:priority":map[string]interface {}{}, "f:restartPolicy":map[string]interface {}{}, "f:schedulerName":map[string]interface {}{}, "f:securityContext":map[string]interface {}{}, "f:terminationGracePeriodSeconds":map[string]interface {}{}}}, "manager":"kubectl", "operation":"Update", "time":"2019-06-13T01:20:30Z"}}, "name":"valid-pod", "namespace":"namespace-1560388829-5703", "resourceVersion":"706", "selfLink":"/api/v1/namespaces/namespace-1560388829-5703/pods/valid-pod", "uid":"f266c1f3-dae1-4008-9e37-29718bec9cd8"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"image":"k8s.gcr.io/serve_hostname", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File"}}, "dnsPolicy":"ClusterFirst", "enableServiceLinks":true, "priority":0, "restartPolicy":"Always", "schedulerName":"default-scheduler", "securityContext":map[string]interface {}{}, "terminationGracePeriodSeconds":30}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
I0613 01:20:30.629] has:missing is not found
I0613 01:20:30.713] Successful
I0613 01:20:30.713] message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
I0613 01:20:30.713] 	template was:
I0613 01:20:30.713] 		{{.missing}}
I0613 01:20:30.714] 	raw data was:
I0613 01:20:30.715] 		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2019-06-13T01:20:30Z","labels":{"name":"valid-pod"},"managedFields":[{"apiVersion":"v1","fields":{"f:metadata":{"f:labels":{".":{},"f:name":{}}},"f:spec":{"f:containers":{"k:{\"name\":\"kubernetes-serve-hostname\"}":{".":{},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:resources":{".":{},"f:limits":{".":{},"f:cpu":{},"f:memory":{}},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:enableServiceLinks":{},"f:priority":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}},"manager":"kubectl","operation":"Update","time":"2019-06-13T01:20:30Z"}],"name":"valid-pod","namespace":"namespace-1560388829-5703","resourceVersion":"706","selfLink":"/api/v1/namespaces/namespace-1560388829-5703/pods/valid-pod","uid":"f266c1f3-dae1-4008-9e37-29718bec9cd8"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
I0613 01:20:30.715] 	object given to template engine was:
I0613 01:20:30.716] 		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2019-06-13T01:20:30Z labels:map[name:valid-pod] managedFields:[map[apiVersion:v1 fields:map[f:metadata:map[f:labels:map[.:map[] f:name:map[]]] f:spec:map[f:containers:map[k:{"name":"kubernetes-serve-hostname"}:map[.:map[] f:image:map[] f:imagePullPolicy:map[] f:name:map[] f:resources:map[.:map[] f:limits:map[.:map[] f:cpu:map[] f:memory:map[]] f:requests:map[.:map[] f:cpu:map[] f:memory:map[]]] f:terminationMessagePath:map[] f:terminationMessagePolicy:map[]]] f:dnsPolicy:map[] f:enableServiceLinks:map[] f:priority:map[] f:restartPolicy:map[] f:schedulerName:map[] f:securityContext:map[] f:terminationGracePeriodSeconds:map[]]] manager:kubectl operation:Update time:2019-06-13T01:20:30Z]] name:valid-pod namespace:namespace-1560388829-5703 resourceVersion:706 selfLink:/api/v1/namespaces/namespace-1560388829-5703/pods/valid-pod uid:f266c1f3-dae1-4008-9e37-29718bec9cd8] spec:map[containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30] status:map[phase:Pending qosClass:Guaranteed]]
I0613 01:20:30.716] has:map has no entry for key "missing"
W0613 01:20:30.816] error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
I0613 01:20:31.801] Successful
I0613 01:20:31.801] message:NAME        READY   STATUS    RESTARTS   AGE
I0613 01:20:31.802] valid-pod   0/1     Pending   0          0s
I0613 01:20:31.802] STATUS      REASON          MESSAGE
I0613 01:20:31.802] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I0613 01:20:31.802] has:STATUS
I0613 01:20:31.803] Successful
I0613 01:20:31.803] message:NAME        READY   STATUS    RESTARTS   AGE
I0613 01:20:31.803] valid-pod   0/1     Pending   0          0s
I0613 01:20:31.803] STATUS      REASON          MESSAGE
I0613 01:20:31.803] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I0613 01:20:31.803] has:valid-pod
I0613 01:20:32.879] Successful
I0613 01:20:32.879] message:pod/valid-pod
I0613 01:20:32.879] has not:STATUS
I0613 01:20:32.880] Successful
I0613 01:20:32.881] message:pod/valid-pod
... skipping 142 lines ...
I0613 01:20:33.980]   terminationGracePeriodSeconds: 30
I0613 01:20:33.980] status:
I0613 01:20:33.980]   phase: Pending
I0613 01:20:33.980]   qosClass: Guaranteed
I0613 01:20:33.980] has:name: valid-pod
I0613 01:20:34.041] Successful
I0613 01:20:34.041] message:Error from server (NotFound): pods "invalid-pod" not found
I0613 01:20:34.042] has:"invalid-pod" not found
I0613 01:20:34.117] pod "valid-pod" deleted
I0613 01:20:34.204] get.sh:200: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:20:34.367] pod/redis-master created
I0613 01:20:34.372] pod/valid-pod created
I0613 01:20:34.461] Successful
... skipping 283 lines ...
I0613 01:20:39.871] +++ command: run_kubectl_exec_pod_tests
I0613 01:20:39.882] +++ [0613 01:20:39] Creating namespace namespace-1560388839-8789
I0613 01:20:39.953] namespace/namespace-1560388839-8789 created
I0613 01:20:40.023] Context "test" modified.
I0613 01:20:40.029] +++ [0613 01:20:40] Testing kubectl exec POD COMMAND
I0613 01:20:40.111] Successful
I0613 01:20:40.112] message:Error from server (NotFound): pods "abc" not found
I0613 01:20:40.112] has:pods "abc" not found
I0613 01:20:40.280] pod/test-pod created
I0613 01:20:40.382] Successful
I0613 01:20:40.382] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I0613 01:20:40.382] has not:pods "test-pod" not found
I0613 01:20:40.384] Successful
I0613 01:20:40.384] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I0613 01:20:40.384] has not:pod or type/name must be specified
I0613 01:20:40.465] pod "test-pod" deleted
I0613 01:20:40.486] +++ exit code: 0
I0613 01:20:40.521] Recording: run_kubectl_exec_resource_name_tests
I0613 01:20:40.521] Running command: run_kubectl_exec_resource_name_tests
I0613 01:20:40.544] 
... skipping 2 lines ...
I0613 01:20:40.549] +++ command: run_kubectl_exec_resource_name_tests
I0613 01:20:40.562] +++ [0613 01:20:40] Creating namespace namespace-1560388840-5965
I0613 01:20:40.632] namespace/namespace-1560388840-5965 created
I0613 01:20:40.703] Context "test" modified.
I0613 01:20:40.710] +++ [0613 01:20:40] Testing kubectl exec TYPE/NAME COMMAND
I0613 01:20:40.814] Successful
I0613 01:20:40.814] message:error: the server doesn't have a resource type "foo"
I0613 01:20:40.815] has:error:
I0613 01:20:40.900] Successful
I0613 01:20:40.900] message:Error from server (NotFound): deployments.extensions "bar" not found
I0613 01:20:40.900] has:"bar" not found
I0613 01:20:41.071] pod/test-pod created
I0613 01:20:41.256] replicaset.apps/frontend created
W0613 01:20:41.357] I0613 01:20:41.261007   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388840-5965", Name:"frontend", UID:"0ccb0fb2-188e-4102-a096-dd9ba622b4e1", APIVersion:"apps/v1", ResourceVersion:"821", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-zc2st
W0613 01:20:41.357] I0613 01:20:41.268562   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388840-5965", Name:"frontend", UID:"0ccb0fb2-188e-4102-a096-dd9ba622b4e1", APIVersion:"apps/v1", ResourceVersion:"821", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-lqqw6
W0613 01:20:41.357] I0613 01:20:41.268930   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388840-5965", Name:"frontend", UID:"0ccb0fb2-188e-4102-a096-dd9ba622b4e1", APIVersion:"apps/v1", ResourceVersion:"821", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-bz29j
I0613 01:20:41.458] configmap/test-set-env-config created
I0613 01:20:41.541] Successful
I0613 01:20:41.541] message:error: cannot attach to *v1.ConfigMap: selector for *v1.ConfigMap not implemented
I0613 01:20:41.542] has:not implemented
I0613 01:20:41.630] Successful
I0613 01:20:41.630] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I0613 01:20:41.631] has not:not found
I0613 01:20:41.632] Successful
I0613 01:20:41.632] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I0613 01:20:41.633] has not:pod or type/name must be specified
I0613 01:20:41.734] Successful
I0613 01:20:41.735] message:Error from server (BadRequest): pod frontend-bz29j does not have a host assigned
I0613 01:20:41.735] has not:not found
I0613 01:20:41.737] Successful
I0613 01:20:41.737] message:Error from server (BadRequest): pod frontend-bz29j does not have a host assigned
I0613 01:20:41.738] has not:pod or type/name must be specified
I0613 01:20:41.814] pod "test-pod" deleted
I0613 01:20:41.895] replicaset.extensions "frontend" deleted
I0613 01:20:41.980] configmap "test-set-env-config" deleted
I0613 01:20:42.000] +++ exit code: 0
I0613 01:20:42.035] Recording: run_create_secret_tests
I0613 01:20:42.035] Running command: run_create_secret_tests
I0613 01:20:42.056] 
I0613 01:20:42.058] +++ Running case: test-cmd.run_create_secret_tests 
I0613 01:20:42.061] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:20:42.063] +++ command: run_create_secret_tests
I0613 01:20:42.154] Successful
I0613 01:20:42.154] message:Error from server (NotFound): secrets "mysecret" not found
I0613 01:20:42.154] has:secrets "mysecret" not found
I0613 01:20:42.314] Successful
I0613 01:20:42.315] message:Error from server (NotFound): secrets "mysecret" not found
I0613 01:20:42.315] has:secrets "mysecret" not found
I0613 01:20:42.317] Successful
I0613 01:20:42.317] message:user-specified
I0613 01:20:42.317] has:user-specified
I0613 01:20:42.392] Successful
I0613 01:20:42.464] {"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-create-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-create-cm","uid":"2751ee79-8a9d-48aa-b07b-28d7cf871f24","resourceVersion":"841","creationTimestamp":"2019-06-13T01:20:42Z"}}
... skipping 164 lines ...
W0613 01:20:45.314] I0613 01:20:43.100205   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388840-5965", Name:"test-the-deployment-5f6d7c99fd", UID:"cc24920d-89f8-4ad5-a618-4469dee4783b", APIVersion:"apps/v1", ResourceVersion:"851", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-the-deployment-5f6d7c99fd-fntls
W0613 01:20:45.314] I0613 01:20:43.100251   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388840-5965", Name:"test-the-deployment-5f6d7c99fd", UID:"cc24920d-89f8-4ad5-a618-4469dee4783b", APIVersion:"apps/v1", ResourceVersion:"851", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-the-deployment-5f6d7c99fd-x458k
I0613 01:20:46.295] Successful
I0613 01:20:46.295] message:NAME        READY   STATUS    RESTARTS   AGE
I0613 01:20:46.295] valid-pod   0/1     Pending   0          1s
I0613 01:20:46.295] STATUS      REASON          MESSAGE
I0613 01:20:46.296] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I0613 01:20:46.296] has:Timeout exceeded while reading body
I0613 01:20:46.381] Successful
I0613 01:20:46.381] message:NAME        READY   STATUS    RESTARTS   AGE
I0613 01:20:46.381] valid-pod   0/1     Pending   0          2s
I0613 01:20:46.381] has:valid-pod
I0613 01:20:46.454] Successful
I0613 01:20:46.455] message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
I0613 01:20:46.455] has:Invalid timeout value
I0613 01:20:46.538] pod "valid-pod" deleted
I0613 01:20:46.557] +++ exit code: 0
I0613 01:20:46.595] Recording: run_crd_tests
I0613 01:20:46.595] Running command: run_crd_tests
I0613 01:20:46.618] 
... skipping 221 lines ...
I0613 01:20:51.266] foo.company.com/test patched
I0613 01:20:51.363] crd.sh:237: Successful get foos/test {{.patched}}: value1
I0613 01:20:51.446] foo.company.com/test patched
I0613 01:20:51.547] crd.sh:239: Successful get foos/test {{.patched}}: value2
I0613 01:20:51.632] foo.company.com/test patched
I0613 01:20:51.719] crd.sh:241: Successful get foos/test {{.patched}}: <no value>
I0613 01:20:51.877] +++ [0613 01:20:51] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
I0613 01:20:51.938] {
I0613 01:20:51.938]     "apiVersion": "company.com/v1",
I0613 01:20:51.938]     "kind": "Foo",
I0613 01:20:51.938]     "metadata": {
I0613 01:20:51.938]         "annotations": {
I0613 01:20:51.939]             "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 335 lines ...
I0613 01:21:14.409] (Bnamespace/non-native-resources created
I0613 01:21:14.586] bar.company.com/test created
I0613 01:21:14.679] crd.sh:456: Successful get bars {{len .items}}: 1
I0613 01:21:14.758] (Bnamespace "non-native-resources" deleted
I0613 01:21:19.995] crd.sh:459: Successful get bars {{len .items}}: 0
I0613 01:21:20.159] (Bcustomresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
W0613 01:21:20.260] Error from server (NotFound): namespaces "non-native-resources" not found
I0613 01:21:20.360] customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
I0613 01:21:20.398] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I0613 01:21:20.508] customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
I0613 01:21:20.542] +++ exit code: 0
I0613 01:21:20.612] Recording: run_cmd_with_img_tests
I0613 01:21:20.612] Running command: run_cmd_with_img_tests
... skipping 7 lines ...
I0613 01:21:20.845] +++ [0613 01:21:20] Testing cmd with image
I0613 01:21:20.942] Successful
I0613 01:21:20.942] message:deployment.apps/test1 created
I0613 01:21:20.942] has:deployment.apps/test1 created
I0613 01:21:21.025] deployment.extensions "test1" deleted
I0613 01:21:21.103] Successful
I0613 01:21:21.103] message:error: Invalid image name "InvalidImageName": invalid reference format
I0613 01:21:21.104] has:error: Invalid image name "InvalidImageName": invalid reference format
I0613 01:21:21.115] +++ exit code: 0
I0613 01:21:21.154] +++ [0613 01:21:21] Testing recursive resources
I0613 01:21:21.159] +++ [0613 01:21:21] Creating namespace namespace-1560388881-14989
I0613 01:21:21.233] namespace/namespace-1560388881-14989 created
I0613 01:21:21.304] Context "test" modified.
I0613 01:21:21.405] generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:21.727] (Bgeneric-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:21.730] (BSuccessful
I0613 01:21:21.731] message:pod/busybox0 created
I0613 01:21:21.731] pod/busybox1 created
I0613 01:21:21.731] error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0613 01:21:21.732] has:error validating data: kind not set
I0613 01:21:21.816] generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:21.987] (Bgeneric-resources.sh:219: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
I0613 01:21:21.989] (BSuccessful
I0613 01:21:21.990] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:21.990] has:Object 'Kind' is missing
I0613 01:21:22.088] generic-resources.sh:226: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:22.376] (Bgeneric-resources.sh:230: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0613 01:21:22.378] (BSuccessful
I0613 01:21:22.379] message:pod/busybox0 replaced
I0613 01:21:22.379] pod/busybox1 replaced
I0613 01:21:22.379] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0613 01:21:22.379] has:error validating data: kind not set
I0613 01:21:22.463] generic-resources.sh:235: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:22.555] (BSuccessful
I0613 01:21:22.555] message:Name:         busybox0
I0613 01:21:22.555] Namespace:    namespace-1560388881-14989
I0613 01:21:22.555] Priority:     0
I0613 01:21:22.555] Node:         <none>
... skipping 153 lines ...
I0613 01:21:22.568] has:Object 'Kind' is missing
I0613 01:21:22.648] generic-resources.sh:245: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:22.817] (Bgeneric-resources.sh:249: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
I0613 01:21:22.819] (BSuccessful
I0613 01:21:22.819] message:pod/busybox0 annotated
I0613 01:21:22.819] pod/busybox1 annotated
I0613 01:21:22.819] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:22.819] has:Object 'Kind' is missing
I0613 01:21:22.898] generic-resources.sh:254: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:23.200] (Bgeneric-resources.sh:258: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0613 01:21:23.202] (BSuccessful
I0613 01:21:23.202] message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0613 01:21:23.203] pod/busybox0 configured
I0613 01:21:23.203] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0613 01:21:23.203] pod/busybox1 configured
I0613 01:21:23.203] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0613 01:21:23.203] has:error validating data: kind not set
I0613 01:21:23.287] generic-resources.sh:264: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:23.436] (Bdeployment.apps/nginx created
I0613 01:21:23.537] generic-resources.sh:268: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I0613 01:21:23.621] (Bgeneric-resources.sh:269: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0613 01:21:23.774] (Bgeneric-resources.sh:273: Successful get deployment nginx {{ .apiVersion }}: extensions/v1beta1
I0613 01:21:23.777] (BSuccessful
... skipping 42 lines ...
I0613 01:21:23.849] deployment.extensions "nginx" deleted
I0613 01:21:23.940] generic-resources.sh:280: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:24.098] (Bgeneric-resources.sh:284: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:24.100] (BSuccessful
I0613 01:21:24.101] message:kubectl convert is DEPRECATED and will be removed in a future version.
I0613 01:21:24.101] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I0613 01:21:24.101] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.102] has:Object 'Kind' is missing
I0613 01:21:24.183] generic-resources.sh:289: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:24.266] (BSuccessful
I0613 01:21:24.267] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.267] has:busybox0:busybox1:
I0613 01:21:24.268] Successful
I0613 01:21:24.268] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.268] has:Object 'Kind' is missing
I0613 01:21:24.353] generic-resources.sh:298: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:24.444] (Bpod/busybox0 labeled pod/busybox1 labeled error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.525] generic-resources.sh:303: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
I0613 01:21:24.527] (BSuccessful
I0613 01:21:24.527] message:pod/busybox0 labeled
I0613 01:21:24.527] pod/busybox1 labeled
I0613 01:21:24.527] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.528] has:Object 'Kind' is missing
I0613 01:21:24.606] generic-resources.sh:308: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:24.698] (Bpod/busybox0 patched pod/busybox1 patched error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.785] generic-resources.sh:313: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
I0613 01:21:24.787] (BSuccessful
I0613 01:21:24.788] message:pod/busybox0 patched
I0613 01:21:24.788] pod/busybox1 patched
I0613 01:21:24.788] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:24.788] has:Object 'Kind' is missing
I0613 01:21:24.885] generic-resources.sh:318: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:25.065] (Bgeneric-resources.sh:322: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:25.067] (BSuccessful
I0613 01:21:25.067] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0613 01:21:25.067] pod "busybox0" force deleted
I0613 01:21:25.067] pod "busybox1" force deleted
I0613 01:21:25.068] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0613 01:21:25.068] has:Object 'Kind' is missing
I0613 01:21:25.151] generic-resources.sh:327: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:25.310] (Breplicationcontroller/busybox0 created
I0613 01:21:25.314] replicationcontroller/busybox1 created
I0613 01:21:25.411] generic-resources.sh:331: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:25.496] (Bgeneric-resources.sh:336: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:25.578] (Bgeneric-resources.sh:337: Successful get rc busybox0 {{.spec.replicas}}: 1
I0613 01:21:25.660] (Bgeneric-resources.sh:338: Successful get rc busybox1 {{.spec.replicas}}: 1
I0613 01:21:25.824] (Bgeneric-resources.sh:343: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0613 01:21:25.913] (Bgeneric-resources.sh:344: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0613 01:21:25.915] (BSuccessful
I0613 01:21:25.915] message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
I0613 01:21:25.915] horizontalpodautoscaler.autoscaling/busybox1 autoscaled
I0613 01:21:25.915] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:25.916] has:Object 'Kind' is missing
I0613 01:21:25.985] horizontalpodautoscaler.autoscaling "busybox0" deleted
I0613 01:21:26.060] horizontalpodautoscaler.autoscaling "busybox1" deleted
I0613 01:21:26.151] generic-resources.sh:352: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:26.226] (Bgeneric-resources.sh:353: Successful get rc busybox0 {{.spec.replicas}}: 1
I0613 01:21:26.323] (Bgeneric-resources.sh:354: Successful get rc busybox1 {{.spec.replicas}}: 1
I0613 01:21:26.502] (Bgeneric-resources.sh:358: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0613 01:21:26.585] (Bgeneric-resources.sh:359: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0613 01:21:26.588] (BSuccessful
I0613 01:21:26.588] message:service/busybox0 exposed
I0613 01:21:26.588] service/busybox1 exposed
I0613 01:21:26.589] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:26.589] has:Object 'Kind' is missing
I0613 01:21:26.692] generic-resources.sh:365: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:26.776] (Bgeneric-resources.sh:366: Successful get rc busybox0 {{.spec.replicas}}: 1
I0613 01:21:26.883] (Bgeneric-resources.sh:367: Successful get rc busybox1 {{.spec.replicas}}: 1
I0613 01:21:27.095] (Bgeneric-resources.sh:371: Successful get rc busybox0 {{.spec.replicas}}: 2
I0613 01:21:27.185] (Bgeneric-resources.sh:372: Successful get rc busybox1 {{.spec.replicas}}: 2
I0613 01:21:27.188] (BSuccessful
I0613 01:21:27.188] message:replicationcontroller/busybox0 scaled
I0613 01:21:27.188] replicationcontroller/busybox1 scaled
I0613 01:21:27.188] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:27.188] has:Object 'Kind' is missing
I0613 01:21:27.275] generic-resources.sh:377: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:27.453] (Bgeneric-resources.sh:381: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:27.455] (BSuccessful
I0613 01:21:27.456] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0613 01:21:27.456] replicationcontroller "busybox0" force deleted
I0613 01:21:27.456] replicationcontroller "busybox1" force deleted
I0613 01:21:27.456] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:27.457] has:Object 'Kind' is missing
I0613 01:21:27.540] generic-resources.sh:386: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:27.696] (Bdeployment.apps/nginx1-deployment created
I0613 01:21:27.703] deployment.apps/nginx0-deployment created
W0613 01:21:27.803] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W0613 01:21:27.804] I0613 01:21:20.932877   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1560388880-11131", Name:"test1", UID:"b609a5c5-2fc2-401c-ac9d-369515fe0eb8", APIVersion:"apps/v1", ResourceVersion:"995", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test1-67487cf556 to 1
W0613 01:21:27.804] I0613 01:21:20.940572   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388880-11131", Name:"test1-67487cf556", UID:"bee30ea8-4814-423f-abf5-d30788d6f462", APIVersion:"apps/v1", ResourceVersion:"996", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test1-67487cf556-n74st
W0613 01:21:27.804] W0613 01:21:21.170284   48662 cacher.go:154] Terminating all watchers from cacher *unstructured.Unstructured
W0613 01:21:27.804] E0613 01:21:21.171730   52013 reflector.go:283] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to watch *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.805] W0613 01:21:21.303608   48662 cacher.go:154] Terminating all watchers from cacher *unstructured.Unstructured
W0613 01:21:27.805] E0613 01:21:21.305202   52013 reflector.go:283] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to watch *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.805] W0613 01:21:21.410052   48662 cacher.go:154] Terminating all watchers from cacher *unstructured.Unstructured
W0613 01:21:27.805] E0613 01:21:21.411523   52013 reflector.go:283] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to watch *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.805] W0613 01:21:21.524962   48662 cacher.go:154] Terminating all watchers from cacher *unstructured.Unstructured
W0613 01:21:27.806] E0613 01:21:21.526664   52013 reflector.go:283] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to watch *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.806] E0613 01:21:22.173156   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.806] E0613 01:21:22.306538   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.806] E0613 01:21:22.412749   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.806] E0613 01:21:22.528033   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.807] E0613 01:21:23.174850   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.807] E0613 01:21:23.308062   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.807] E0613 01:21:23.414247   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.808] I0613 01:21:23.442386   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1560388881-14989", Name:"nginx", UID:"306adecd-6a75-41e2-ad18-36f5195f52eb", APIVersion:"apps/v1", ResourceVersion:"1022", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-74bc9cdd45 to 3
W0613 01:21:27.808] I0613 01:21:23.447468   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx-74bc9cdd45", UID:"197281a4-21fd-4539-a408-d18d83d1e1c8", APIVersion:"apps/v1", ResourceVersion:"1023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74bc9cdd45-spj5t
W0613 01:21:27.808] I0613 01:21:23.452382   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx-74bc9cdd45", UID:"197281a4-21fd-4539-a408-d18d83d1e1c8", APIVersion:"apps/v1", ResourceVersion:"1023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74bc9cdd45-qb5zm
W0613 01:21:27.809] I0613 01:21:23.452707   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx-74bc9cdd45", UID:"197281a4-21fd-4539-a408-d18d83d1e1c8", APIVersion:"apps/v1", ResourceVersion:"1023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74bc9cdd45-dhgjj
W0613 01:21:27.809] E0613 01:21:23.529259   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.809] kubectl convert is DEPRECATED and will be removed in a future version.
W0613 01:21:27.809] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
W0613 01:21:27.810] E0613 01:21:24.176039   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.810] E0613 01:21:24.309647   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.810] E0613 01:21:24.415696   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.810] E0613 01:21:24.530491   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.811] E0613 01:21:25.177014   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.811] E0613 01:21:25.310632   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.811] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0613 01:21:27.812] I0613 01:21:25.314297   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox0", UID:"d0723790-3a71-47e0-a1ce-5dad2445e868", APIVersion:"v1", ResourceVersion:"1053", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-x4sx6
W0613 01:21:27.812] I0613 01:21:25.318725   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox1", UID:"a82f8afb-5558-47f3-8acd-984acc9980d1", APIVersion:"v1", ResourceVersion:"1055", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-pdqlb
W0613 01:21:27.812] E0613 01:21:25.417094   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.813] E0613 01:21:25.531678   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.813] E0613 01:21:26.178209   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.813] E0613 01:21:26.319074   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.813] E0613 01:21:26.417929   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.813] E0613 01:21:26.533139   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.814] I0613 01:21:26.988021   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox0", UID:"d0723790-3a71-47e0-a1ce-5dad2445e868", APIVersion:"v1", ResourceVersion:"1073", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-8p4d2
W0613 01:21:27.814] I0613 01:21:26.995502   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox1", UID:"a82f8afb-5558-47f3-8acd-984acc9980d1", APIVersion:"v1", ResourceVersion:"1076", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-c4vch
W0613 01:21:27.814] E0613 01:21:27.179361   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.815] E0613 01:21:27.320418   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.815] E0613 01:21:27.419265   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.815] E0613 01:21:27.534195   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:27.815] error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0613 01:21:27.816] I0613 01:21:27.703833   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1560388881-14989", Name:"nginx1-deployment", UID:"3c343cbd-d642-4564-936a-d4ddd1a10cb8", APIVersion:"apps/v1", ResourceVersion:"1094", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx1-deployment-746b5db49 to 2
W0613 01:21:27.816] I0613 01:21:27.708978   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1560388881-14989", Name:"nginx0-deployment", UID:"a36347a5-6b7d-46cf-9b92-38bb3467b8c3", APIVersion:"apps/v1", ResourceVersion:"1095", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx0-deployment-5679bdc9fb to 2
W0613 01:21:27.816] I0613 01:21:27.712824   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx1-deployment-746b5db49", UID:"8cddd252-0825-4158-a3ef-292767e9478d", APIVersion:"apps/v1", ResourceVersion:"1096", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-746b5db49-gr2q4
W0613 01:21:27.817] I0613 01:21:27.713538   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx0-deployment-5679bdc9fb", UID:"cebe2685-d3e0-490d-b20c-af30285efde9", APIVersion:"apps/v1", ResourceVersion:"1098", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-5679bdc9fb-nlcmb
W0613 01:21:27.817] I0613 01:21:27.718065   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx0-deployment-5679bdc9fb", UID:"cebe2685-d3e0-490d-b20c-af30285efde9", APIVersion:"apps/v1", ResourceVersion:"1098", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-5679bdc9fb-l96d7
W0613 01:21:27.817] I0613 01:21:27.722092   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1560388881-14989", Name:"nginx1-deployment-746b5db49", UID:"8cddd252-0825-4158-a3ef-292767e9478d", APIVersion:"apps/v1", ResourceVersion:"1096", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-746b5db49-2klfs
I0613 01:21:27.918] generic-resources.sh:390: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
I0613 01:21:27.919] (Bgeneric-resources.sh:391: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0613 01:21:28.103] (Bgeneric-resources.sh:395: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0613 01:21:28.105] (BSuccessful
I0613 01:21:28.105] message:deployment.apps/nginx1-deployment skipped rollback (current template already matches revision 1)
I0613 01:21:28.106] deployment.apps/nginx0-deployment skipped rollback (current template already matches revision 1)
I0613 01:21:28.106] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.106] has:Object 'Kind' is missing
W0613 01:21:28.207] E0613 01:21:28.180656   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:28.307] deployment.apps/nginx1-deployment paused
I0613 01:21:28.308] deployment.apps/nginx0-deployment paused
I0613 01:21:28.326] generic-resources.sh:402: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
I0613 01:21:28.328] (BSuccessful
I0613 01:21:28.328] message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.329] has:Object 'Kind' is missing
I0613 01:21:28.416] deployment.apps/nginx1-deployment resumed
I0613 01:21:28.424] deployment.apps/nginx0-deployment resumed
W0613 01:21:28.525] E0613 01:21:28.321599   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:28.526] E0613 01:21:28.420528   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:28.536] E0613 01:21:28.535511   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:28.637] generic-resources.sh:408: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>:
I0613 01:21:28.637] (BSuccessful
I0613 01:21:28.638] message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.638] has:Object 'Kind' is missing
I0613 01:21:28.691] Successful
I0613 01:21:28.691] message:deployment.apps/nginx1-deployment 
I0613 01:21:28.692] REVISION  CHANGE-CAUSE
I0613 01:21:28.692] 1         <none>
I0613 01:21:28.692] 
I0613 01:21:28.692] deployment.apps/nginx0-deployment 
I0613 01:21:28.692] REVISION  CHANGE-CAUSE
I0613 01:21:28.692] 1         <none>
I0613 01:21:28.692] 
I0613 01:21:28.693] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.693] has:nginx0-deployment
I0613 01:21:28.694] Successful
I0613 01:21:28.694] message:deployment.apps/nginx1-deployment 
I0613 01:21:28.694] REVISION  CHANGE-CAUSE
I0613 01:21:28.694] 1         <none>
I0613 01:21:28.694] 
I0613 01:21:28.694] deployment.apps/nginx0-deployment 
I0613 01:21:28.694] REVISION  CHANGE-CAUSE
I0613 01:21:28.694] 1         <none>
I0613 01:21:28.694] 
I0613 01:21:28.695] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.695] has:nginx1-deployment
I0613 01:21:28.695] Successful
I0613 01:21:28.696] message:deployment.apps/nginx1-deployment 
I0613 01:21:28.696] REVISION  CHANGE-CAUSE
I0613 01:21:28.696] 1         <none>
I0613 01:21:28.696] 
I0613 01:21:28.696] deployment.apps/nginx0-deployment 
I0613 01:21:28.696] REVISION  CHANGE-CAUSE
I0613 01:21:28.696] 1         <none>
I0613 01:21:28.696] 
I0613 01:21:28.697] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0613 01:21:28.697] has:Object 'Kind' is missing
I0613 01:21:28.778] deployment.apps "nginx1-deployment" force deleted
I0613 01:21:28.784] deployment.apps "nginx0-deployment" force deleted
W0613 01:21:28.885] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0613 01:21:28.885] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
W0613 01:21:29.183] E0613 01:21:29.182320   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:29.323] E0613 01:21:29.323102   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:29.422] E0613 01:21:29.421689   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:29.537] E0613 01:21:29.536756   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:29.893] generic-resources.sh:424: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:30.082] (Breplicationcontroller/busybox0 created
I0613 01:21:30.092] replicationcontroller/busybox1 created
I0613 01:21:30.196] generic-resources.sh:428: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0613 01:21:30.289] (BSuccessful
I0613 01:21:30.289] message:no rollbacker has been implemented for "ReplicationController"
... skipping 3 lines ...
I0613 01:21:30.292] Successful
I0613 01:21:30.292] message:no rollbacker has been implemented for "ReplicationController"
I0613 01:21:30.292] no rollbacker has been implemented for "ReplicationController"
I0613 01:21:30.293] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.293] has:Object 'Kind' is missing
W0613 01:21:30.394] I0613 01:21:30.086886   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox0", UID:"56dff59d-9ce2-4165-bb7b-ab4a72dc6c9b", APIVersion:"v1", ResourceVersion:"1143", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-4khff
W0613 01:21:30.394] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0613 01:21:30.394] I0613 01:21:30.096488   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1560388881-14989", Name:"busybox1", UID:"925aa994-34e5-4d13-aab4-466bd94d0979", APIVersion:"v1", ResourceVersion:"1145", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-pnrrc
W0613 01:21:30.395] E0613 01:21:30.183726   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:30.395] E0613 01:21:30.325276   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:30.425] E0613 01:21:30.425128   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:30.526] Successful
I0613 01:21:30.527] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.527] error: replicationcontrollers "busybox0" pausing is not supported
I0613 01:21:30.527] error: replicationcontrollers "busybox1" pausing is not supported
I0613 01:21:30.527] has:Object 'Kind' is missing
I0613 01:21:30.527] Successful
I0613 01:21:30.527] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.527] error: replicationcontrollers "busybox0" pausing is not supported
I0613 01:21:30.527] error: replicationcontrollers "busybox1" pausing is not supported
I0613 01:21:30.528] has:replicationcontrollers "busybox0" pausing is not supported
I0613 01:21:30.528] Successful
I0613 01:21:30.528] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.528] error: replicationcontrollers "busybox0" pausing is not supported
I0613 01:21:30.528] error: replicationcontrollers "busybox1" pausing is not supported
I0613 01:21:30.528] has:replicationcontrollers "busybox1" pausing is not supported
I0613 01:21:30.528] Successful
I0613 01:21:30.529] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.529] error: replicationcontrollers "busybox0" resuming is not supported
I0613 01:21:30.529] error: replicationcontrollers "busybox1" resuming is not supported
I0613 01:21:30.529] has:Object 'Kind' is missing
I0613 01:21:30.529] Successful
I0613 01:21:30.530] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.530] error: replicationcontrollers "busybox0" resuming is not supported
I0613 01:21:30.530] error: replicationcontrollers "busybox1" resuming is not supported
I0613 01:21:30.530] has:replicationcontrollers "busybox0" resuming is not supported
I0613 01:21:30.530] Successful
I0613 01:21:30.530] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0613 01:21:30.530] error: replicationcontrollers "busybox0" resuming is not supported
I0613 01:21:30.530] error: replicationcontrollers "busybox1" resuming is not supported
I0613 01:21:30.531] has:replicationcontrollers "busybox0" resuming is not supported
I0613 01:21:30.581] replicationcontroller "busybox0" force deleted
I0613 01:21:30.587] replicationcontroller "busybox1" force deleted
W0613 01:21:30.688] E0613 01:21:30.538029   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:30.689] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0613 01:21:30.689] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
W0613 01:21:31.185] E0613 01:21:31.185283   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:31.328] E0613 01:21:31.328027   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:31.428] E0613 01:21:31.427505   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:31.540] E0613 01:21:31.539640   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:31.640] Recording: run_namespace_tests
I0613 01:21:31.641] Running command: run_namespace_tests
I0613 01:21:31.641] 
I0613 01:21:31.641] +++ Running case: test-cmd.run_namespace_tests 
I0613 01:21:31.641] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:21:31.642] +++ command: run_namespace_tests
I0613 01:21:31.642] +++ [0613 01:21:31] Testing kubectl(v1:namespaces)
I0613 01:21:31.703] namespace/my-namespace created
I0613 01:21:31.814] core.sh:1312: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I0613 01:21:31.896] (Bnamespace "my-namespace" deleted
W0613 01:21:32.187] E0613 01:21:32.186588   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:32.329] E0613 01:21:32.329340   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:32.429] E0613 01:21:32.428977   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:32.541] E0613 01:21:32.540831   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:33.188] E0613 01:21:33.187944   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:33.331] E0613 01:21:33.330662   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:33.430] E0613 01:21:33.430226   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:33.542] E0613 01:21:33.542086   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:34.190] E0613 01:21:34.189364   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:34.332] E0613 01:21:34.332127   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:34.432] E0613 01:21:34.431467   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:34.544] E0613 01:21:34.543394   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:35.191] E0613 01:21:35.190555   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:35.334] E0613 01:21:35.333452   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:35.433] E0613 01:21:35.432676   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:35.545] E0613 01:21:35.544480   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:36.192] E0613 01:21:36.191746   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:36.335] E0613 01:21:36.334813   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:36.434] E0613 01:21:36.433947   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:36.546] E0613 01:21:36.545931   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:36.987] namespace/my-namespace condition met
I0613 01:21:37.083] Successful
I0613 01:21:37.083] message:Error from server (NotFound): namespaces "my-namespace" not found
I0613 01:21:37.083] has: not found
I0613 01:21:37.179] namespace/my-namespace created
I0613 01:21:37.271] core.sh:1321: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I0613 01:21:37.502] (BSuccessful
I0613 01:21:37.503] message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
I0613 01:21:37.503] namespace "kube-node-lease" deleted
... skipping 30 lines ...
I0613 01:21:37.509] namespace "namespace-1560388843-4471" deleted
I0613 01:21:37.509] namespace "namespace-1560388844-507" deleted
I0613 01:21:37.509] namespace "namespace-1560388846-16858" deleted
I0613 01:21:37.509] namespace "namespace-1560388847-2540" deleted
I0613 01:21:37.509] namespace "namespace-1560388880-11131" deleted
I0613 01:21:37.509] namespace "namespace-1560388881-14989" deleted
I0613 01:21:37.510] Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
I0613 01:21:37.510] Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
I0613 01:21:37.510] Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
I0613 01:21:37.510] has:warning: deleting cluster-scoped resources
I0613 01:21:37.510] Successful
I0613 01:21:37.511] message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
I0613 01:21:37.511] namespace "kube-node-lease" deleted
I0613 01:21:37.511] namespace "my-namespace" deleted
I0613 01:21:37.511] namespace "namespace-1560388746-3034" deleted
... skipping 28 lines ...
I0613 01:21:37.516] namespace "namespace-1560388843-4471" deleted
I0613 01:21:37.516] namespace "namespace-1560388844-507" deleted
I0613 01:21:37.516] namespace "namespace-1560388846-16858" deleted
I0613 01:21:37.517] namespace "namespace-1560388847-2540" deleted
I0613 01:21:37.517] namespace "namespace-1560388880-11131" deleted
I0613 01:21:37.517] namespace "namespace-1560388881-14989" deleted
I0613 01:21:37.517] Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
I0613 01:21:37.517] Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
I0613 01:21:37.518] Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
I0613 01:21:37.518] has:namespace "my-namespace" deleted
I0613 01:21:37.618] core.sh:1333: Successful get namespaces {{range.items}}{{ if eq $id_field \"other\" }}found{{end}}{{end}}:: :
I0613 01:21:37.688] (Bnamespace/other created
I0613 01:21:37.777] core.sh:1337: Successful get namespaces/other {{.metadata.name}}: other
I0613 01:21:37.865] (Bcore.sh:1341: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:38.031] (Bpod/valid-pod created
W0613 01:21:38.132] E0613 01:21:37.194025   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.132] E0613 01:21:37.335914   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.132] E0613 01:21:37.434847   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.132] E0613 01:21:37.547110   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.132] I0613 01:21:38.090469   52013 controller_utils.go:1029] Waiting for caches to sync for resource quota controller
W0613 01:21:38.191] I0613 01:21:38.190743   52013 controller_utils.go:1036] Caches are synced for resource quota controller
W0613 01:21:38.196] E0613 01:21:38.195716   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:38.297] core.sh:1345: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0613 01:21:38.297] core.sh:1347: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0613 01:21:38.318] Successful
I0613 01:21:38.319] message:error: a resource cannot be retrieved by name across all namespaces
I0613 01:21:38.319] has:a resource cannot be retrieved by name across all namespaces
I0613 01:21:38.411] core.sh:1354: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0613 01:21:38.515] pod "valid-pod" force deleted
I0613 01:21:38.611] core.sh:1358: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:38.686] namespace "other" deleted
W0613 01:21:38.786] E0613 01:21:38.337348   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.787] E0613 01:21:38.436534   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.787] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0613 01:21:38.787] E0613 01:21:38.548549   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:38.787] I0613 01:21:38.603118   52013 controller_utils.go:1029] Waiting for caches to sync for garbage collector controller
W0613 01:21:38.787] I0613 01:21:38.703487   52013 controller_utils.go:1036] Caches are synced for garbage collector controller
W0613 01:21:39.197] E0613 01:21:39.197168   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:39.339] E0613 01:21:39.338712   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:39.438] E0613 01:21:39.437776   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:39.550] E0613 01:21:39.549910   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:40.199] E0613 01:21:40.198353   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:40.340] E0613 01:21:40.339897   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:40.439] E0613 01:21:40.439168   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:40.552] E0613 01:21:40.551453   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:40.732] I0613 01:21:40.731546   52013 horizontal.go:340] Horizontal Pod Autoscaler busybox0 has been deleted in namespace-1560388881-14989
W0613 01:21:40.736] I0613 01:21:40.735908   52013 horizontal.go:340] Horizontal Pod Autoscaler busybox1 has been deleted in namespace-1560388881-14989
W0613 01:21:41.200] E0613 01:21:41.199774   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:41.342] E0613 01:21:41.341539   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:41.440] E0613 01:21:41.440315   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:41.553] E0613 01:21:41.552878   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:42.201] E0613 01:21:42.201032   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:42.343] E0613 01:21:42.342896   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:42.450] E0613 01:21:42.449888   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:42.554] E0613 01:21:42.554077   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:43.202] E0613 01:21:43.202053   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:43.345] E0613 01:21:43.344379   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:43.453] E0613 01:21:43.452648   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:43.556] E0613 01:21:43.555357   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:43.786] +++ exit code: 0
I0613 01:21:43.878] Recording: run_secrets_test
I0613 01:21:43.878] Running command: run_secrets_test
I0613 01:21:43.897] 
I0613 01:21:43.899] +++ Running case: test-cmd.run_secrets_test 
I0613 01:21:43.900] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 58 lines ...
I0613 01:21:45.752] secret "test-secret" deleted
I0613 01:21:45.826] secret/test-secret created
I0613 01:21:45.921] core.sh:761: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
I0613 01:21:45.999] core.sh:762: Successful get secret/test-secret --namespace=test-secrets {{.type}}: kubernetes.io/tls
I0613 01:21:46.068] secret "test-secret" deleted
W0613 01:21:46.169] I0613 01:21:44.107887   69697 loader.go:359] Config loaded from file:  /tmp/tmp.A7KUUYjBfl/.kube/config
W0613 01:21:46.169] E0613 01:21:44.203080   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.170] E0613 01:21:44.345602   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.170] E0613 01:21:44.453646   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.170] E0613 01:21:44.556650   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.171] E0613 01:21:45.205048   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.171] E0613 01:21:45.346726   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.171] E0613 01:21:45.455072   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.172] E0613 01:21:45.558046   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:46.206] E0613 01:21:46.206372   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:46.307] secret/secret-string-data created
I0613 01:21:46.363] core.sh:784: Successful get secret/secret-string-data --namespace=test-secrets  {{.data}}: map[k1:djE= k2:djI=]
I0613 01:21:46.450] core.sh:785: Successful get secret/secret-string-data --namespace=test-secrets  {{.data}}: map[k1:djE= k2:djI=]
I0613 01:21:46.530] core.sh:786: Successful get secret/secret-string-data --namespace=test-secrets  {{.stringData}}: <no value>
I0613 01:21:46.612] secret "secret-string-data" deleted
I0613 01:21:46.699] core.sh:795: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:21:46.852] secret "test-secret" deleted
I0613 01:21:46.928] namespace "test-secrets" deleted
W0613 01:21:47.028] E0613 01:21:46.347740   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.029] E0613 01:21:46.456221   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.029] E0613 01:21:46.559315   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.207] E0613 01:21:47.207339   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.350] E0613 01:21:47.349541   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.461] E0613 01:21:47.457471   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:47.561] E0613 01:21:47.560477   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:48.209] E0613 01:21:48.208809   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:48.351] E0613 01:21:48.350932   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:48.461] E0613 01:21:48.460550   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:48.562] E0613 01:21:48.562003   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:49.211] E0613 01:21:49.211313   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:49.353] E0613 01:21:49.352956   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:49.462] E0613 01:21:49.461707   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:49.565] E0613 01:21:49.565234   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:50.214] E0613 01:21:50.213472   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:50.355] E0613 01:21:50.355046   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:50.463] E0613 01:21:50.463002   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:50.567] E0613 01:21:50.566566   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:51.215] E0613 01:21:51.214937   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:51.356] E0613 01:21:51.356305   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:51.465] E0613 01:21:51.465112   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:51.568] E0613 01:21:51.567945   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:52.031] +++ exit code: 0
I0613 01:21:52.060] Recording: run_configmap_tests
I0613 01:21:52.061] Running command: run_configmap_tests
I0613 01:21:52.082] 
I0613 01:21:52.084] +++ Running case: test-cmd.run_configmap_tests 
I0613 01:21:52.086] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:21:52.088] +++ command: run_configmap_tests
I0613 01:21:52.098] +++ [0613 01:21:52] Creating namespace namespace-1560388912-22961
I0613 01:21:52.165] namespace/namespace-1560388912-22961 created
I0613 01:21:52.232] Context "test" modified.
I0613 01:21:52.239] +++ [0613 01:21:52] Testing configmaps
W0613 01:21:52.340] E0613 01:21:52.216338   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:52.358] E0613 01:21:52.357483   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:52.458] configmap/test-configmap created
I0613 01:21:52.536] core.sh:28: Successful get configmap/test-configmap {{.metadata.name}}: test-configmap
I0613 01:21:52.609] configmap "test-configmap" deleted
I0613 01:21:52.698] core.sh:33: Successful get namespaces {{range.items}}{{ if eq $id_field \"test-configmaps\" }}found{{end}}{{end}}:: :
I0613 01:21:52.764] namespace/test-configmaps created
I0613 01:21:52.847] core.sh:37: Successful get namespaces/test-configmaps {{.metadata.name}}: test-configmaps
... skipping 3 lines ...
I0613 01:21:53.143] configmap/test-binary-configmap created
I0613 01:21:53.226] core.sh:48: Successful get configmap/test-configmap --namespace=test-configmaps {{.metadata.name}}: test-configmap
I0613 01:21:53.310] core.sh:49: Successful get configmap/test-binary-configmap --namespace=test-configmaps {{.metadata.name}}: test-binary-configmap
I0613 01:21:53.526] configmap "test-configmap" deleted
I0613 01:21:53.599] configmap "test-binary-configmap" deleted
I0613 01:21:53.669] namespace "test-configmaps" deleted
W0613 01:21:53.772] E0613 01:21:52.466234   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:53.773] E0613 01:21:52.569221   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:53.773] E0613 01:21:53.217506   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:53.774] E0613 01:21:53.358744   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:53.774] E0613 01:21:53.467607   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:53.774] E0613 01:21:53.570470   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:54.219] E0613 01:21:54.219166   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:54.361] E0613 01:21:54.360427   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:54.469] E0613 01:21:54.468979   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:54.572] E0613 01:21:54.571763   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:55.221] E0613 01:21:55.220565   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:55.362] E0613 01:21:55.361766   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:55.470] E0613 01:21:55.470270   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:55.573] E0613 01:21:55.573220   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:56.222] E0613 01:21:56.222029   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:56.363] E0613 01:21:56.363165   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:56.472] E0613 01:21:56.471675   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:56.575] E0613 01:21:56.574693   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:57.224] E0613 01:21:57.223643   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:57.365] E0613 01:21:57.364592   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:57.473] E0613 01:21:57.473124   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:57.576] E0613 01:21:57.576212   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:58.225] E0613 01:21:58.225003   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:58.366] E0613 01:21:58.366090   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:58.475] E0613 01:21:58.474582   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:58.578] E0613 01:21:58.577530   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:58.799] +++ exit code: 0
I0613 01:21:58.835] Recording: run_client_config_tests
I0613 01:21:58.835] Running command: run_client_config_tests
I0613 01:21:58.858] 
I0613 01:21:58.860] +++ Running case: test-cmd.run_client_config_tests 
I0613 01:21:58.863] +++ working dir: /go/src/k8s.io/kubernetes
I0613 01:21:58.865] +++ command: run_client_config_tests
I0613 01:21:58.879] +++ [0613 01:21:58] Creating namespace namespace-1560388918-7167
I0613 01:21:58.953] namespace/namespace-1560388918-7167 created
I0613 01:21:59.029] Context "test" modified.
I0613 01:21:59.037] +++ [0613 01:21:59] Testing client config
I0613 01:21:59.105] Successful
I0613 01:21:59.105] message:error: stat missing: no such file or directory
I0613 01:21:59.105] has:missing: no such file or directory
I0613 01:21:59.184] Successful
I0613 01:21:59.185] message:error: stat missing: no such file or directory
I0613 01:21:59.185] has:missing: no such file or directory
I0613 01:21:59.255] Successful
I0613 01:21:59.255] message:error: stat missing: no such file or directory
I0613 01:21:59.255] has:missing: no such file or directory
I0613 01:21:59.338] Successful
I0613 01:21:59.339] message:Error in configuration: context was not found for specified context: missing-context
I0613 01:21:59.339] has:context was not found for specified context: missing-context
I0613 01:21:59.408] Successful
I0613 01:21:59.408] message:error: no server found for cluster "missing-cluster"
I0613 01:21:59.408] has:no server found for cluster "missing-cluster"
I0613 01:21:59.481] Successful
I0613 01:21:59.482] message:error: auth info "missing-user" does not exist
I0613 01:21:59.482] has:auth info "missing-user" does not exist
W0613 01:21:59.582] E0613 01:21:59.226383   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:59.583] E0613 01:21:59.367793   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:59.583] E0613 01:21:59.475853   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:21:59.583] E0613 01:21:59.579216   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:21:59.684] Successful
I0613 01:21:59.684] message:error: Error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
I0613 01:21:59.684] has:Error loading config file
I0613 01:21:59.693] Successful
I0613 01:21:59.693] message:error: stat missing-config: no such file or directory
I0613 01:21:59.693] has:no such file or directory
I0613 01:21:59.705] +++ exit code: 0
I0613 01:21:59.737] Recording: run_service_accounts_tests
I0613 01:21:59.737] Running command: run_service_accounts_tests
I0613 01:21:59.760] 
I0613 01:21:59.762] +++ Running case: test-cmd.run_service_accounts_tests 
... skipping 7 lines ...
I0613 01:22:00.108] namespace/test-service-accounts created
I0613 01:22:00.200] core.sh:820: Successful get namespaces/test-service-accounts {{.metadata.name}}: test-service-accounts
I0613 01:22:00.271] serviceaccount/test-service-account created
I0613 01:22:00.364] core.sh:826: Successful get serviceaccount/test-service-account --namespace=test-service-accounts {{.metadata.name}}: test-service-account
I0613 01:22:00.441] serviceaccount "test-service-account" deleted
I0613 01:22:00.528] namespace "test-service-accounts" deleted
W0613 01:22:00.628] E0613 01:22:00.227625   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:00.629] E0613 01:22:00.369210   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:00.629] E0613 01:22:00.477709   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:00.629] E0613 01:22:00.580698   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:01.230] E0613 01:22:01.229228   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:01.372] E0613 01:22:01.371764   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:01.480] E0613 01:22:01.479906   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:01.583] E0613 01:22:01.582365   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:02.232] E0613 01:22:02.232289   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:02.374] E0613 01:22:02.373515   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:02.482] E0613 01:22:02.481543   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:02.584] E0613 01:22:02.584151   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:03.234] E0613 01:22:03.233791   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:03.375] E0613 01:22:03.374796   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:03.483] E0613 01:22:03.482989   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:03.586] E0613 01:22:03.585624   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:04.235] E0613 01:22:04.235074   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:04.376] E0613 01:22:04.376165   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:04.485] E0613 01:22:04.484730   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:04.588] E0613 01:22:04.587287   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:05.237] E0613 01:22:05.236917   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:05.378] E0613 01:22:05.377721   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:05.486] E0613 01:22:05.486347   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:05.588] E0613 01:22:05.588270   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:05.689] +++ exit code: 0
I0613 01:22:05.690] Recording: run_job_tests
I0613 01:22:05.690] Running command: run_job_tests
I0613 01:22:05.699] 
I0613 01:22:05.702] +++ Running case: test-cmd.run_job_tests 
I0613 01:22:05.703] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 14 lines ...
I0613 01:22:06.471] Labels:                        run=pi
I0613 01:22:06.471] Annotations:                   <none>
I0613 01:22:06.471] Schedule:                      59 23 31 2 *
I0613 01:22:06.471] Concurrency Policy:            Allow
I0613 01:22:06.471] Suspend:                       False
I0613 01:22:06.472] Successful Job History Limit:  3
I0613 01:22:06.472] Failed Job History Limit:      1
I0613 01:22:06.472] Starting Deadline Seconds:     <unset>
I0613 01:22:06.472] Selector:                      <unset>
I0613 01:22:06.472] Parallelism:                   <unset>
I0613 01:22:06.472] Completions:                   <unset>
I0613 01:22:06.472] Pod Template:
I0613 01:22:06.472]   Labels:  run=pi
... skipping 19 lines ...
I0613 01:22:06.558] Successful
I0613 01:22:06.558] message:job.batch/test-job
I0613 01:22:06.558] has:job.batch/test-job
I0613 01:22:06.654] batch.sh:48: Successful get jobs {{range.items}}{{.metadata.name}}{{end}}: 
I0613 01:22:06.741] job.batch/test-job created
W0613 01:22:06.842] kubectl run --generator=cronjob/v1beta1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W0613 01:22:06.842] E0613 01:22:06.238215   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:06.843] E0613 01:22:06.379119   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:06.843] E0613 01:22:06.488010   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:06.843] E0613 01:22:06.589849   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:06.843] I0613 01:22:06.744906   52013 event.go:258] Event(v1.ObjectReference{Kind:"Job", Namespace:"test-jobs", Name:"test-job", UID:"49c631c0-191f-4fa9-b356-cf98af85b2c7", APIVersion:"batch/v1", ResourceVersion:"1436", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-tx5hz
I0613 01:22:06.944] batch.sh:53: Successful get job/test-job --namespace=test-jobs {{.metadata.name}}: test-job
I0613 01:22:06.944] NAME       COMPLETIONS   DURATION   AGE
I0613 01:22:06.944] test-job   0/1           0s         0s
I0613 01:22:07.034] Name:           test-job
I0613 01:22:07.035] Namespace:      test-jobs
... skipping 3 lines ...
I0613 01:22:07.035]                 run=pi
I0613 01:22:07.035] Annotations:    cronjob.kubernetes.io/instantiate: manual
I0613 01:22:07.035] Controlled By:  CronJob/pi
I0613 01:22:07.035] Parallelism:    1
I0613 01:22:07.036] Completions:    1
I0613 01:22:07.036] Start Time:     Thu, 13 Jun 2019 01:22:06 +0000
I0613 01:22:07.036] Pods Statuses:  1 Running / 0 Succeeded / 0 Failed
I0613 01:22:07.036] Pod Template:
I0613 01:22:07.036]   Labels:  controller-uid=49c631c0-191f-4fa9-b356-cf98af85b2c7
I0613 01:22:07.036]            job-name=test-job
I0613 01:22:07.036]            run=pi
I0613 01:22:07.036]   Containers:
I0613 01:22:07.036]    pi:
... skipping 15 lines ...
I0613 01:22:07.037]   Type    Reason            Age   From            Message
I0613 01:22:07.037]   ----    ------            ----  ----            -------
I0613 01:22:07.038]   Normal  SuccessfulCreate  1s    job-controller  Created pod: test-job-tx5hz
I0613 01:22:07.118] job.batch "test-job" deleted
I0613 01:22:07.208] cronjob.batch "pi" deleted
I0613 01:22:07.296] namespace "test-jobs" deleted
W0613 01:22:07.397] E0613 01:22:07.239722   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:07.398] E0613 01:22:07.380893   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:07.490] E0613 01:22:07.490136   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:07.592] E0613 01:22:07.591975   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:08.242] E0613 01:22:08.241550   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:08.383] E0613 01:22:08.382597   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:08.494] E0613 01:22:08.491725   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:08.594] E0613 01:22:08.593543   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:09.243] E0613 01:22:09.243222   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:09.385] E0613 01:22:09.384237   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:09.494] E0613 01:22:09.493582   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:09.595] E0613 01:22:09.595066   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:10.245] E0613 01:22:10.244745   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:10.388] E0613 01:22:10.388118   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:10.495] E0613 01:22:10.495248   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:10.597] E0613 01:22:10.596751   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:11.247] E0613 01:22:11.247088   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:11.390] E0613 01:22:11.389630   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:11.497] E0613 01:22:11.496952   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:11.599] E0613 01:22:11.598380   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:12.249] E0613 01:22:12.248635   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:12.391] E0613 01:22:12.391017   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:12.492] +++ exit code: 0
I0613 01:22:12.492] Recording: run_create_job_tests
I0613 01:22:12.493] Running command: run_create_job_tests
I0613 01:22:12.493] 
I0613 01:22:12.493] +++ Running case: test-cmd.run_create_job_tests 
I0613 01:22:12.493] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 5 lines ...
I0613 01:22:12.835] create.sh:86: Successful get job test-job {{(index .spec.template.spec.containers 0).image}}: k8s.gcr.io/nginx:test-cmd
I0613 01:22:12.919] job.batch "test-job" deleted
I0613 01:22:13.006] job.batch/test-job-pi created
I0613 01:22:13.109] create.sh:92: Successful get job test-job-pi {{(index .spec.template.spec.containers 0).image}}: k8s.gcr.io/perl
I0613 01:22:13.190] job.batch "test-job-pi" deleted
I0613 01:22:13.287] cronjob.batch/test-pi created
W0613 01:22:13.388] E0613 01:22:12.498334   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:13.389] E0613 01:22:12.599595   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:13.389] I0613 01:22:12.730101   52013 event.go:258] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1560388932-13139", Name:"test-job", UID:"effe6351-b409-47db-961e-2f0fd15e7dcc", APIVersion:"batch/v1", ResourceVersion:"1453", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-2chdl
W0613 01:22:13.389] I0613 01:22:13.005759   52013 event.go:258] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1560388932-13139", Name:"test-job-pi", UID:"ad579355-92b5-41d3-9f4f-2f4a07f460e1", APIVersion:"batch/v1", ResourceVersion:"1461", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-pi-h5dc2
W0613 01:22:13.390] E0613 01:22:13.250480   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:13.390] kubectl run --generator=cronjob/v1beta1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W0613 01:22:13.390] I0613 01:22:13.387247   52013 event.go:258] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1560388932-13139", Name:"my-pi", UID:"38d01e77-1bf2-4dd4-b69a-a4211022cef8", APIVersion:"batch/v1", ResourceVersion:"1470", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: my-pi-m2gp9
W0613 01:22:13.393] E0613 01:22:13.392829   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:13.494] job.batch/my-pi created
I0613 01:22:13.498] Successful
I0613 01:22:13.499] message:[perl -Mbignum=bpi -wle print bpi(10)]
I0613 01:22:13.499] has:perl -Mbignum=bpi -wle print bpi(10)
I0613 01:22:13.579] job.batch "my-pi" deleted
I0613 01:22:13.664] cronjob.batch "test-pi" deleted
... skipping 7 lines ...
I0613 01:22:13.771] +++ [0613 01:22:13] Creating namespace namespace-1560388933-21364
I0613 01:22:13.845] namespace/namespace-1560388933-21364 created
I0613 01:22:13.915] Context "test" modified.
I0613 01:22:13.922] +++ [0613 01:22:13] Testing pod templates
I0613 01:22:14.016] core.sh:1419: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:22:14.205] podtemplate/nginx created
W0613 01:22:14.306] E0613 01:22:13.500049   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:14.307] E0613 01:22:13.601179   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:14.307] I0613 01:22:14.201543   48662 controller.go:606] quota admission added evaluator for: podtemplates
W0613 01:22:14.307] E0613 01:22:14.252284   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:14.395] E0613 01:22:14.394406   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:14.495] core.sh:1423: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I0613 01:22:14.496] NAME    CONTAINERS   IMAGES   POD LABELS
I0613 01:22:14.496] nginx   nginx        nginx    name=nginx
I0613 01:22:14.577] core.sh:1431: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I0613 01:22:14.661] podtemplate "nginx" deleted
I0613 01:22:14.758] core.sh:1435: Successful get podtemplate {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 68 lines ...
I0613 01:22:15.755] Port:              <unset>  6379/TCP
I0613 01:22:15.755] TargetPort:        6379/TCP
I0613 01:22:15.755] Endpoints:         <none>
I0613 01:22:15.756] Session Affinity:  None
I0613 01:22:15.756] Events:            <none>
I0613 01:22:15.756]
W0613 01:22:15.857] E0613 01:22:14.501691   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:15.857] E0613 01:22:14.603050   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:15.857] E0613 01:22:15.253653   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:15.857] E0613 01:22:15.395306   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:15.858] E0613 01:22:15.503277   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:15.858] E0613 01:22:15.604883   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:15.958] Successful describe services:
I0613 01:22:15.959] Name:              kubernetes
I0613 01:22:15.959] Namespace:         default
I0613 01:22:15.959] Labels:            component=apiserver
I0613 01:22:15.959]                    provider=kubernetes
I0613 01:22:15.959] Annotations:       <none>
... skipping 238 lines ...
I0613 01:22:16.917]   selector:
I0613 01:22:16.917]     role: padawan
I0613 01:22:16.917]   sessionAffinity: None
I0613 01:22:16.917]   type: ClusterIP
I0613 01:22:16.917] status:
I0613 01:22:16.917]   loadBalancer: {}
W0613 01:22:17.018] E0613 01:22:16.255388   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.018] E0613 01:22:16.397002   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.018] E0613 01:22:16.504838   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.018] E0613 01:22:16.606520   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.018] error: you must specify resources by --filename when --local is set.
W0613 01:22:17.018] Example resource specifications include:
W0613 01:22:17.019]    '-f rsrc.yaml'
W0613 01:22:17.019]    '--filename=rsrc.json'
I0613 01:22:17.119] core.sh:886: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I0613 01:22:17.262] core.sh:893: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0613 01:22:17.350] service "redis-master" deleted
I0613 01:22:17.447] core.sh:900: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:17.547] core.sh:904: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:17.728] service/redis-master created
W0613 01:22:17.829] E0613 01:22:17.256894   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.830] E0613 01:22:17.398463   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.830] E0613 01:22:17.506220   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:17.830] E0613 01:22:17.608393   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:17.931] core.sh:908: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0613 01:22:17.942] core.sh:912: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0613 01:22:18.131] service/service-v1-test created
I0613 01:22:18.236] core.sh:933: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
I0613 01:22:18.417] service/service-v1-test replaced
W0613 01:22:18.518] E0613 01:22:18.258259   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:18.518] E0613 01:22:18.400125   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:18.518] E0613 01:22:18.507743   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:18.611] E0613 01:22:18.610294   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:18.712] core.sh:940: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
I0613 01:22:18.712] service "redis-master" deleted
I0613 01:22:18.717] service "service-v1-test" deleted
I0613 01:22:18.837] core.sh:948: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:18.938] core.sh:952: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:19.123] service/redis-master created
W0613 01:22:19.260] E0613 01:22:19.260060   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:19.361] service/redis-slave created
I0613 01:22:19.424] core.sh:957: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:redis-slave:
I0613 01:22:19.513] Successful
I0613 01:22:19.513] message:NAME           RSRC
I0613 01:22:19.514] kubernetes     145
I0613 01:22:19.514] redis-master   1503
I0613 01:22:19.514] redis-slave    1506
I0613 01:22:19.514] has:redis-master
I0613 01:22:19.617] core.sh:967: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:redis-slave:
I0613 01:22:19.705] service "redis-master" deleted
I0613 01:22:19.715] service "redis-slave" deleted
W0613 01:22:19.816] E0613 01:22:19.401482   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:19.816] E0613 01:22:19.509776   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:19.817] E0613 01:22:19.611664   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:19.917] core.sh:974: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:19.934] core.sh:978: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:20.017] service/beep-boop created
I0613 01:22:20.118] core.sh:982: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: beep-boop:kubernetes:
I0613 01:22:20.221] core.sh:986: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: beep-boop:kubernetes:
I0613 01:22:20.310] service "beep-boop" deleted
W0613 01:22:20.411] E0613 01:22:20.261412   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:20.411] E0613 01:22:20.402917   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:20.512] E0613 01:22:20.511431   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:20.594] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W0613 01:22:20.614] E0613 01:22:20.614366   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:20.615] I0613 01:22:20.614790   52013 event.go:258] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"default", Name:"testmetadata", UID:"f45a42ce-24d6-4d7d-a9e6-27c4910c22a2", APIVersion:"apps/v1", ResourceVersion:"1519", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set testmetadata-6b5b57544d to 2
W0613 01:22:20.623] I0613 01:22:20.623055   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"default", Name:"testmetadata-6b5b57544d", UID:"ead12af7-3721-4f44-9e97-c978bed737e9", APIVersion:"apps/v1", ResourceVersion:"1520", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: testmetadata-6b5b57544d-df498
W0613 01:22:20.630] I0613 01:22:20.629255   52013 event.go:258] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"default", Name:"testmetadata-6b5b57544d", UID:"ead12af7-3721-4f44-9e97-c978bed737e9", APIVersion:"apps/v1", ResourceVersion:"1520", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: testmetadata-6b5b57544d-db79g
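The deprecation warning and the ScalingReplicaSet/SuccessfulCreate events above correspond to a kubectl run invocation that creates a Deployment named testmetadata with two replicas. A hedged sketch; the image and any additional flags used by the test script are assumptions:
$ kubectl run testmetadata --generator=deployment/apps.v1 --image=k8s.gcr.io/pause:2.0 --replicas=2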
I0613 01:22:20.730] core.sh:993: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0613 01:22:20.731] core.sh:997: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:22:20.731] service/testmetadata created
... skipping 15 lines ...
I0613 01:22:21.345] +++ [0613 01:22:21] Creating namespace namespace-1560388941-5134
I0613 01:22:21.426] namespace/namespace-1560388941-5134 created
I0613 01:22:21.507] Context "test" modified.
I0613 01:22:21.516] +++ [0613 01:22:21] Testing kubectl(v1:daemonsets)
I0613 01:22:21.615] apps.sh:30: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I0613 01:22:21.794] daemonset.apps/bind created
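The bind DaemonSet is created here by applying a manifest from hack/testdata/. The invocation can be reconstructed from the kubernetes.io/change-cause annotation that appears in the controller error later in this log; whether this particular apply used the rv1 or the rv2 variant of the manifest is an assumption:
$ kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true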
W0613 01:22:21.895] E0613 01:22:21.263002   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:21.896] E0613 01:22:21.404370   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:21.896] E0613 01:22:21.513154   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:21.896] E0613 01:22:21.616336   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
I0613 01:22:21.997] apps.sh:34: Successful get daemonsets bind {{.spec.templateGeneration}}: 1
I0613 01:22:22.088] daemonset.apps/bind configured
I0613 01:22:22.193] apps.sh:37: Successful get daemonsets bind {{.spec.templateGeneration}}: 1
I0613 01:22:22.294] daemonset.extensions/bind image updated
I0613 01:22:22.397] apps.sh:40: Successful get daemonsets bind {{.spec.templateGeneration}}: 2
I0613 01:22:22.498] daemonset.extensions/bind env updated
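The "image updated" and "env updated" lines above are kubectl set commands against the bind DaemonSet; each spec change bumps the deprecated .spec.templateGeneration field that the surrounding apps.sh assertions read back. A sketch, assuming the kubernetes-pause container name seen later in this log and placeholder values for the image and environment variable:
$ kubectl set image daemonsets/bind kubernetes-pause=k8s.gcr.io/pause:latest
$ kubectl set env daemonsets/bind EXAMPLE_VAR=example-value
$ kubectl get daemonsets bind -o go-template='{{.spec.templateGeneration}}'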
... skipping 43 lines ...
I0613 01:22:24.856] apps.sh:84: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0613 01:22:24.944] apps.sh:85: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0613 01:22:25.048] daemonset.extensions/bind rolled back
I0613 01:22:25.141] apps.sh:88: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0613 01:22:25.232] apps.sh:89: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0613 01:22:25.338] Successful
I0613 01:22:25.338] message:error: unable to find specified revision 1000000 in history
I0613 01:22:25.339] has:unable to find specified revision
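The error message checked above is what kubectl rollout undo reports when the requested revision is not present in the DaemonSet's history; the neighboring "rolled back" lines are the success path of the same command. A sketch:
$ kubectl rollout undo daemonset/bind --to-revision=1000000   # fails: revision not in history
$ kubectl rollout undo daemonset/bind                         # succeeds, producing the "rolled back" output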
I0613 01:22:25.432] apps.sh:93: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0613 01:22:25.524] apps.sh:94: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0613 01:22:25.630] daemonset.extensions/bind rolled back
I0613 01:22:25.735] apps.sh:97: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I0613 01:22:25.827] apps.sh:98: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
... skipping 22 lines ...
I0613 01:22:27.178] Namespace:    namespace-1560388946-3059
I0613 01:22:27.178] Selector:     app=guestbook,tier=frontend
I0613 01:22:27.178] Labels:       app=guestbook
I0613 01:22:27.178]               tier=frontend
I0613 01:22:27.178] Annotations:  <none>
I0613 01:22:27.178] Replicas:     3 current / 3 desired
I0613 01:22:27.178] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0613 01:22:27.178] Pod Template:
I0613 01:22:27.179]   Labels:  app=guestbook
I0613 01:22:27.179]            tier=frontend
I0613 01:22:27.179]   Containers:
I0613 01:22:27.179]    php-redis:
I0613 01:22:27.179]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0613 01:22:27.291] Namespace:    namespace-1560388946-3059
I0613 01:22:27.291] Selector:     app=guestbook,tier=frontend
I0613 01:22:27.291] Labels:       app=guestbook
I0613 01:22:27.292]               tier=frontend
I0613 01:22:27.292] Annotations:  <none>
I0613 01:22:27.292] Replicas:     3 current / 3 desired
I0613 01:22:27.292] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0613 01:22:27.292] Pod Template:
I0613 01:22:27.292]   Labels:  app=guestbook
I0613 01:22:27.292]            tier=frontend
I0613 01:22:27.292]   Containers:
I0613 01:22:27.293]    php-redis:
I0613 01:22:27.293]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I0613 01:22:27.397] Namespace:    namespace-1560388946-3059
I0613 01:22:27.397] Selector:     app=guestbook,tier=frontend
I0613 01:22:27.397] Labels:       app=guestbook
I0613 01:22:27.397]               tier=frontend
I0613 01:22:27.397] Annotations:  <none>
I0613 01:22:27.397] Replicas:     3 current / 3 desired
I0613 01:22:27.397] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0613 01:22:27.398] Pod Template:
I0613 01:22:27.398]   Labels:  app=guestbook
I0613 01:22:27.398]            tier=frontend
I0613 01:22:27.398]   Containers:
I0613 01:22:27.398]    php-redis:
I0613 01:22:27.398]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 4 lines ...
I0613 01:22:27.399]       memory:  100Mi
I0613 01:22:27.399]     Environment:
I0613 01:22:27.399]       GET_HOSTS_FROM:  dns
I0613 01:22:27.399]     Mounts:            <none>
I0613 01:22:27.399]   Volumes:             <none>
I0613 01:22:27.399]
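The three nearly identical blocks above are repeated outputs of describing the guestbook frontend replication controller, as the test exercises several describe code paths against the same object. A minimal reproduction, assuming the controller is named frontend (its Name: line falls in the truncated portion of the output) and taking the namespace from the log:
$ kubectl describe rc frontend --namespace=namespace-1560388946-3059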
W0613 01:22:27.500] E0613 01:22:22.264601   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.500] I0613 01:22:22.290156   48662 controller.go:606] quota admission added evaluator for: daemonsets.extensions
W0613 01:22:27.500] E0613 01:22:22.406035   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.500] E0613 01:22:22.515026   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.501] E0613 01:22:22.617994   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.501] E0613 01:22:23.266178   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.501] E0613 01:22:23.407889   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.501] E0613 01:22:23.516330   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.501] E0613 01:22:23.619529   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:24.267417   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:24.409073   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:24.517639   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:24.621104   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:25.269330   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.502] E0613 01:22:25.411138   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.503] E0613 01:22:25.519196   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.503] E0613 01:22:25.622194   52013 reflector.go:125] k8s.io/client-go/dynamic/dynamicinformer/informer.go:90: Failed to list *unstructured.Unstructured: the server could not find the requested resource
W0613 01:22:27.507] E0613 01:22:25.662784   52013 daemon_controller.go:302] namespace-1560388943-21722/bind failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"bind", GenerateName:"", Namespace:"namespace-1560388943-21722", SelfLink:"/apis/apps/v1/namespaces/namespace-1560388943-21722/daemonsets/bind", UID:"1ba88ffc-d2f6-4c7a-9b53-fabed61f6858", ResourceVersion:"1590", Generation:4, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63695985743, loc:(*time.Location)(0x72e5ba0)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string{"deprecated.daemonset.template.generation":"4", "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{\"kubernetes.io/change-cause\":\"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true\"},\"labels\":{\"service\":\"bind\"},\"name\":\"bind\",\"namespace\":\"namespace-1560388943-21722\"},\"spec\":{\"selector\":{\"matchLabels\":{\"service\":\"bind\"}},\"template\":{\"metadata\":{\"labels\":{\"service\":\"bind\"}},\"spec\":{\"affinity\":{\"podAntiAffinity\":{\"requiredDuringSchedulingIgnoredDuringExecution\":[{\"labelSelector\":{\"matchExpressions\":[{\"key\":\"service\",\"operator\":\"In\",\"values\":[\"bind\"]}]},\"namespaces\":[],\"topologyKey\":\"kubernetes.io/hostname\"}]}},\"containers\":[{\"image\":\"k8s.gcr.io/pause:latest\",\"name\":\"kubernetes-pause\"},{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"app\"}]}},\"updateStrategy\":{\"rollingUpdate\":{\"maxUnavailable\":\"10%\"},\"type\":\"RollingUpdate\"}}}\n", "kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"}, OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kubectl", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc0019c08e0), Fields:(*v1.Fields)(0xc0012c9a20)}, v1.ManagedFieldsEntry{Manager:"kubectl", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc0019c1020), Fields:(*v1.Fields)(0xc0012c9ab8)}, v1.ManagedFieldsEntry{Manager:"kube-controller-manager", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc0019c10c0), Fields:(*v1.Fields)(0xc0012c9ae8)}, v1.ManagedFieldsEntry{Manager:"kubectl", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc0019c1140), Fields:(*v1.Fields)(0xc0012c9b10)}}}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0xc0019c15a0), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, Spec:v1.PodSpec{Volumes:[]v1.Volume(nil), InitContainers:[]v1.Container(nil), 
Containers:[]v1.Container{v1.Container{Name:"kubernetes-pause", Image:"k8s.gcr.io/pause:latest", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}, v1.Container{Name:"app", Image:"k8s.gcr.io/nginx:test-cmd", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources