PR       | bobbypage: WIP: verify specs for mirror pods
Result   | ABORTED
Tests    | 0 failed / 140 succeeded
Started  |
Elapsed  | 12m36s
Revision | 4a550f0ad22bd9d69c2ebba3036fd835da00973a
Refs     | 116726
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdCompletion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdCompletion/shell_not_expected
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdCompletion/unsupported_shell_type
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/accept_a_valid_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_negative_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_non-string_port
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_too_large_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can't_load_old_component_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can't_load_v1beta1_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can't_load_v1beta2_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can_load_current_component_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can_load_v1beta3_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/don't_allow_mixed_arguments_v1beta3
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/fail_on_non_existing_path
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitFeatureGates
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitFeatureGates/feature_gate_PublicKeysECDSA=true
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitFeatureGates/no_feature_gates_passed
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion/invalid_semantic_version_string_is_detected
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion/valid_version_is_accepted
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/invalid_token_non-lowercase
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/invalid_token_size
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/valid_token_is_accepted
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinArgsMixed
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinArgsMixed/discovery-token_and_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinBadArgs
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinBadArgs/discovery-token_and_discovery-file_can't_both_be_set
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinBadArgs/discovery-token_or_discovery-file_must_be_set
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinConfig
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinConfig/config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinConfig/config_path
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryFile
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryFile/invalid_discovery_file
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryFile/valid_discovery_file
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryToken/valid_discovery_token
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryToken/valid_discovery_token_url
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinNodeName
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinNodeName/valid_node_name
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinTLSBootstrapToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinTLSBootstrapToken/valid_bootstrap_token
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinTLSBootstrapToken/valid_bootstrap_token_url
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinToken/valid_token
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinToken/valid_token_url
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenDelete
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenDelete/invalid_token
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenDelete/no_token_provided
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenGenerate
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenGenerateTypoError
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersion/default_output
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersion/invalid_output_option
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersion/short_output
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersionOutputJsonOrYaml
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersionOutputJsonOrYaml/json_output
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersionOutputJsonOrYaml/yaml_output
test-cmd run_RESTMapper_evaluation_tests
test-cmd run_assert_categories_tests
test-cmd run_assert_short_name_tests
test-cmd run_assert_singular_name_tests
test-cmd run_authorization_tests
test-cmd run_certificates_tests
test-cmd run_client_config_tests
test-cmd run_cluster_management_tests
test-cmd run_clusterroles_tests
test-cmd run_configmap_tests
test-cmd run_convert_tests
test-cmd run_crd_deletion_recreation_tests
test-cmd run_crd_tests
test-cmd run_create_job_tests
test-cmd run_create_secret_tests
test-cmd run_daemonset_history_tests
test-cmd run_daemonset_tests
test-cmd run_deployment_tests
test-cmd run_deprecated_api_tests
test-cmd run_exec_credentials_interactive_tests
test-cmd run_exec_credentials_tests
test-cmd run_impersonation_tests
test-cmd run_job_tests
test-cmd run_kubectl_all_namespace_tests
test-cmd run_kubectl_apply_deployments_tests
test-cmd run_kubectl_apply_tests
test-cmd run_kubectl_config_set_cluster_tests
test-cmd run_kubectl_config_set_credentials_tests
test-cmd run_kubectl_config_set_tests
test-cmd run_kubectl_create_error_tests
test-cmd run_kubectl_create_filter_tests
test-cmd run_kubectl_create_kustomization_directory_tests
test-cmd run_kubectl_create_validate_tests
test-cmd run_kubectl_debug_baseline_node_tests
test-cmd run_kubectl_debug_baseline_tests
test-cmd run_kubectl_debug_general_node_tests
test-cmd run_kubectl_debug_general_tests
test-cmd run_kubectl_debug_node_tests
test-cmd run_kubectl_debug_pod_tests
test-cmd run_kubectl_delete_allnamespaces_tests
test-cmd run_kubectl_diff_same_names
test-cmd run_kubectl_diff_tests
test-cmd run_kubectl_events_tests
test-cmd run_kubectl_exec_pod_tests
test-cmd run_kubectl_exec_resource_name_tests
test-cmd run_kubectl_explain_tests
test-cmd run_kubectl_get_tests
test-cmd run_kubectl_help_tests
test-cmd run_kubectl_local_proxy_tests
test-cmd run_kubectl_request_timeout_tests
test-cmd run_kubectl_results_tests
test-cmd run_kubectl_run_tests
test-cmd run_kubectl_server_side_apply_tests
test-cmd run_kubectl_sort_by_tests
test-cmd run_kubectl_version_tests
test-cmd run_lists_tests
test-cmd run_multi_resources_tests
test-cmd run_namespace_tests
test-cmd run_nodes_tests
test-cmd run_persistent_volume_claims_tests
test-cmd run_persistent_volumes_tests
test-cmd run_plugins_tests
test-cmd run_pod_templates_tests
test-cmd run_pod_tests
test-cmd run_rc_tests
test-cmd run_recursive_resources_tests
test-cmd run_resource_aliasing_tests
test-cmd run_retrieve_multiple_tests
test-cmd run_role_tests
test-cmd run_rs_tests
test-cmd run_save_config_tests
test-cmd run_secrets_test
test-cmd run_service_accounts_tests
test-cmd run_service_tests
test-cmd run_stateful_set_tests
test-cmd run_statefulset_history_tests
test-cmd run_storage_class_tests
test-cmd run_swagger_tests
test-cmd run_template_output_tests
test-cmd run_wait_tests
... skipping 53 lines ...
Recording: record_command_canary
Running command: record_command_canary
+++ Running case: test-cmd.record_command_canary
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: record_command_canary
/home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh: line 165: bogus-expected-to-fail: command not found
!!! [0317 21:02:51] Call tree:
!!! [0317 21:02:51]  1: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:47 record_command_canary(...)
!!! [0317 21:02:51]  2: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:112 eVal(...)
!!! [0317 21:02:51]  3: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:141 juLog(...)
!!! [0317 21:02:51]  4: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:169 record_command(...)
!!! [0317 21:02:51]  5: hack/make-rules/test-cmd.sh:35 source(...)
+++ exit code: 1
+++ error: 1
+++ [0317 21:02:51] Running kubeadm tests
+++ [0317 21:02:51] WARNING: linux/arm will no longer be built/shipped by default, please build it explicitly if needed.
+++ [0317 21:02:51] support for linux/arm will be removed in a subsequent release.
go version go1.20.2 linux/amd64
+++ [0317 21:02:55] Building go targets for linux/amd64 k8s.io/kubernetes/cmd/kubeadm (static)
... skipping 233 lines ...
I0317 21:06:12.486768 20027 controller.go:85] Starting OpenAPI V3 controller
I0317 21:06:12.486787 20027 naming_controller.go:291] Starting NamingConditionController
I0317 21:06:12.486805 20027 establishing_controller.go:76] Starting EstablishingController
I0317 21:06:12.486821 20027 nonstructuralschema_controller.go:192] Starting NonStructuralSchemaConditionController
I0317 21:06:12.486839 20027 apiapproval_controller.go:186] Starting KubernetesAPIApprovalPolicyConformantConditionController
I0317 21:06:12.486855 20027 crd_finalizer.go:266] Starting CRDFinalizer
E0317 21:06:12.566665 20027 controller.go:146] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
I0317 21:06:12.585750 20027 cache.go:39] Caches are synced for autoregister controller
I0317 21:06:12.585821 20027 shared_informer.go:318] Caches are synced for cluster_authentication_trust_controller
I0317 21:06:12.586109 20027 apf_controller.go:366] Running API Priority and Fairness config worker
I0317 21:06:12.586238 20027 apf_controller.go:369] Running API Priority and Fairness periodic rebalancing process
I0317 21:06:12.586303 20027 cache.go:39] Caches are synced for AvailableConditionController controller
I0317 21:06:12.586124 20027 cache.go:39] Caches are synced for APIServiceRegistrationController controller
... skipping 19 lines ...
go version go1.20.2 linux/amd64
+++ [0317 21:06:16] Building go targets for linux/amd64 k8s.io/kubernetes/cmd/kube-controller-manager (static)
+++ [0317 21:06:56] Generate kubeconfig for controller-manager
+++ [0317 21:06:57] Starting controller-manager
I0317 21:06:57.572818 23124 serving.go:348] Generated self-signed cert in-memory
W0317 21:06:58.206968 23124 authentication.go:426] failed to read in-cluster kubeconfig for delegated authentication: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W0317 21:06:58.207009 23124 authentication.go:320] No authentication-kubeconfig provided in order to lookup client-ca-file in configmap/extension-apiserver-authentication in kube-system, so client certificate authentication won't work.
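The canary failure above is intentional: the harness runs a bogus command to prove that failures are recorded in the JUnit output. A minimal sketch of that record-and-report pattern in bash (the function name and XML handling here are illustrative, not the actual sh2ju.sh implementation, and XML escaping is omitted):

  # record_cmd: run a command, capture its output and exit code, and emit a
  # JUnit-style <testcase> element for it.
  record_cmd() {
    local name=$1; shift
    local out rc
    out=$("$@" 2>&1); rc=$?
    if [ "$rc" -eq 0 ]; then
      printf '<testcase name="%s"/>\n' "$name"
    else
      printf '<testcase name="%s"><failure>%s</failure></testcase>\n' "$name" "$out"
    fi
    return "$rc"
  }
  record_cmd canary bogus-expected-to-fail   # expected to fail, like the canary above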
W0317 21:06:58.207022 23124 authentication.go:344] No authentication-kubeconfig provided in order to lookup requestheader-client-ca-file in configmap/extension-apiserver-authentication in kube-system, so request-header client certificate authentication won't work.
W0317 21:06:58.207035 23124 authorization.go:225] failed to read in-cluster kubeconfig for delegated authorization: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W0317 21:06:58.207052 23124 authorization.go:193] No authorization-kubeconfig provided, so SubjectAccessReview of authorization tokens won't work.
I0317 21:06:58.207562 23124 controllermanager.go:187] "Starting" version="v1.27.0-beta.0.16+0d0966cd54d33d"
I0317 21:06:58.207602 23124 controllermanager.go:189] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
I0317 21:06:58.209774 23124 secure_serving.go:210] Serving securely on [::]:10257
I0317 21:06:58.209919 23124 tlsconfig.go:240] "Starting DynamicServingCertificateController"
I0317 21:06:58.210124 23124 leaderelection.go:245] attempting to acquire leader lease kube-system/kube-controller-manager...
... skipping 8 lines ...
W0317 21:06:58.240406 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I0317 21:06:58.240423 23124 controllermanager.go:638] "Started controller" controller="nodelifecycle"
I0317 21:06:58.240605 23124 node_lifecycle_controller.go:465] "Sending events to api server"
I0317 21:06:58.240679 23124 node_lifecycle_controller.go:476] "Starting node controller"
I0317 21:06:58.240690 23124 shared_informer.go:311] Waiting for caches to sync for taint
W0317 21:06:58.240784 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
E0317 21:06:58.240880 23124 core.go:92] "Failed to start service controller" err="WARNING: no cloud provider provided, services of type LoadBalancer will fail"
I0317 21:06:58.240910 23124 controllermanager.go:616] "Warning: skipping controller" controller="service"
E0317 21:06:58.241111 23124 core.go:213] "Failed to start cloud node lifecycle controller" err="no cloud provider provided"
I0317 21:06:58.241134 23124 controllermanager.go:616] "Warning: skipping controller" controller="cloud-node-lifecycle"
W0317 21:06:58.241317 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0317 21:06:58.241339 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0317 21:06:58.241408 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0317 21:06:58.241425 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0317 21:06:58.241439 23124 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
... skipping 197 lines ...
I0317 21:06:58.568367 23124 shared_informer.go:318] Caches are synced for endpoint
I0317 21:06:58.568372 23124 shared_informer.go:318] Caches are synced for crt configmap
I0317 21:06:58.568391 23124 shared_informer.go:318] Caches are synced for endpoint_slice
I0317 21:06:58.660022 23124 shared_informer.go:318] Caches are synced for resource quota
I0317 21:06:58.676690 23124 shared_informer.go:318] Caches are synced for resource quota
node/127.0.0.1 created
I0317 21:06:58.921982 23124 actual_state_of_world.go:547] "Failed to update statusUpdateNeeded field in actual state of world" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"127.0.0.1\" does not exist"
+++ [0317 21:06:58] Checking kubectl version
WARNING: This version information is deprecated and will be replaced with the output from kubectl version --short. Use --output=yaml|json to get the full version.
Client Version: version.Info{Major:"1", Minor:"27+", GitVersion:"v1.27.0-beta.0.16+0d0966cd54d33d", GitCommit:"0d0966cd54d33d15042f82a7c095f3a40fd18083", GitTreeState:"clean", BuildDate:"2023-03-17T19:07:07Z", GoVersion:"go1.20.2", Compiler:"gc", Platform:"linux/amd64"}
Kustomize Version: v5.0.1
Server Version: version.Info{Major:"1", Minor:"27+", GitVersion:"v1.27.0-beta.0.16+0d0966cd54d33d", GitCommit:"0d0966cd54d33d15042f82a7c095f3a40fd18083", GitTreeState:"clean", BuildDate:"2023-03-17T19:07:07Z", GoVersion:"go1.20.2", Compiler:"gc", Platform:"linux/amd64"}
I0317 21:06:58.997093 23124 shared_informer.go:318] Caches are synced for garbage collector
I0317 21:06:59.072793 23124 shared_informer.go:318] Caches are synced for garbage collector
I0317 21:06:59.072843 23124 garbagecollector.go:166] "All resource monitors have synced. Proceeding to collect garbage"
The Service "kubernetes" is invalid: spec.clusterIPs: Invalid value: []string{"10.0.0.1"}: failed to allocate IP 10.0.0.1: provided IP is already allocated
NAME         TYPE        CLUSTER-IP   EXTERNAL-IP   PORT(S)   AGE
kubernetes   ClusterIP   10.0.0.1     <none>        443/TCP   44s
Recording: run_kubectl_version_tests
Running command: run_kubectl_version_tests
+++ Running case: test-cmd.run_kubectl_version_tests
... skipping 196 lines ...
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_RESTMapper_evaluation_tests
+++ [0317 21:07:04] Creating namespace namespace-1679087224-6058
namespace/namespace-1679087224-6058 created
Context "test" modified.
+++ [0317 21:07:04] Testing RESTMapper
+++ [0317 21:07:04] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
+++ exit code: 0
NAME                STATUS   SHORTNAMES   APIVERSION   NAMESPACED   KIND
bindings                                  v1           true         Binding
componentstatuses            cs           v1           false        ComponentStatus
configmaps                   cm           v1           true         ConfigMap
endpoints                    ep           v1           true         Endpoints
... skipping 60 lines ...
namespace/namespace-1679087227-12907 created
Context "test" modified.
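The RESTMapper check above asserts the exact error kubectl emits for an unknown resource type. The discovery-backed behavior can be reproduced against any cluster (standard kubectl commands, not specific to this job):

  kubectl get unknownresourcetype   # error: the server doesn't have a resource type "unknownresourcetype"
  kubectl api-resources             # list the types the RESTMapper actually knows about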
+++ [0317 21:07:07] Testing clusterroles
rbac.sh:29: Successful get clusterroles/cluster-admin {{.metadata.name}}: cluster-admin
rbac.sh:30: Successful get clusterrolebindings/cluster-admin {{.metadata.name}}: cluster-admin
Successful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created (dry run)
clusterrole.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created
rbac.sh:42: Successful get clusterrole/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
Successful
message:Warning: deleting cluster-scoped resources, not scoped to the provided namespace
clusterrole.rbac.authorization.k8s.io "pod-admin" deleted
... skipping 18 lines ...
clusterrole.rbac.authorization.k8s.io/url-reader created
rbac.sh:61: Successful get clusterrole/url-reader {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: get:
rbac.sh:62: Successful get clusterrole/url-reader {{range.rules}}{{range.nonResourceURLs}}{{.}}:{{end}}{{end}}: /logs/*:/healthz/*:
clusterrole.rbac.authorization.k8s.io/aggregation-reader created
rbac.sh:64: Successful get clusterrole/aggregation-reader {{.metadata.name}}: aggregation-reader
Successful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created
rbac.sh:77: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
clusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (server dry run)
rbac.sh:80: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
... skipping 64 lines ...
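The rbac.sh assertions above are built on kubectl's go-template output. For example, the rbac.sh:42 check can be reproduced by hand (assuming a cluster where you may create the pod-admin clusterrole):

  kubectl create clusterrole pod-admin --verb='*' --resource=pods
  kubectl get clusterrole/pod-admin \
    -o go-template='{{range .rules}}{{range .verbs}}{{.}}:{{end}}{{end}}'
  # prints "*:" -- the value rbac.sh:42 asserts on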
rbac.sh:102: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:foo:test-all-user:
rbac.sh:103: Successful get clusterrolebinding/super-group {{range.subjects}}{{.name}}:{{end}}: the-group:foo:test-all-user:
rbac.sh:104: Successful get clusterrolebinding/super-sa {{range.subjects}}{{.name}}:{{end}}: sa-name:foo:test-all-user:
rolebinding.rbac.authorization.k8s.io/admin created (dry run)
rolebinding.rbac.authorization.k8s.io/admin created (server dry run)
Successful
message:Error from server (NotFound): rolebindings.rbac.authorization.k8s.io "admin" not found
has: not found
rolebinding.rbac.authorization.k8s.io/admin created
rbac.sh:113: Successful get rolebinding/admin {{.roleRef.kind}}: ClusterRole
rbac.sh:114: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:
rolebinding.rbac.authorization.k8s.io/admin subjects updated
rbac.sh:116: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:foo:
... skipping 152 lines ...
namespace/namespace-1679087235-21420 created
Context "test" modified.
+++ [0317 21:07:15] Testing role
role.rbac.authorization.k8s.io/pod-admin created (dry run)
role.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): roles.rbac.authorization.k8s.io "pod-admin" not found
has: not found
role.rbac.authorization.k8s.io/pod-admin created
rbac.sh:159: Successful get role/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
rbac.sh:160: Successful get role/pod-admin {{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}: pods:
rbac.sh:161: Successful get role/pod-admin {{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}: :
Successful
... skipping 623 lines ...
has:valid-pod
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          1s
has:valid-pod
core.sh:194: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
error: resource(s) were provided, but no name was specified
core.sh:198: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
core.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
error: setting 'all' parameter but found a non empty selector.
core.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
core.sh:210: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
core.sh:214: Successful get pods -lname=valid-pod {{range.items}}{{.metadata.name}}:{{end}}:
core.sh:219: Successful get namespaces {{range.items}}{{ if eq .metadata.name "test-kubectl-describe-pod" }}found{{end}}{{end}}:: :
... skipping 30 lines ...
I0317 21:07:34.538084 28290 round_trippers.go:553] GET https://127.0.0.1:6443/apis/policy/v1/namespaces/test-kubectl-describe-pod/poddisruptionbudgets/test-pdb-2 200 OK in 1 milliseconds
I0317 21:07:34.539864 28290 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/test-kubectl-describe-pod/events?fieldSelector=involvedObject.name%3Dtest-pdb-2%2CinvolvedObject.namespace%3Dtest-kubectl-describe-pod%2CinvolvedObject.kind%3DPodDisruptionBudget%2CinvolvedObject.uid%3D35e103bb-e29e-447a-a9c4-9d2a854f496e&limit=500 200 OK in 1 milliseconds
poddisruptionbudget.policy/test-pdb-3 created
core.sh:271: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
poddisruptionbudget.policy/test-pdb-4 created
core.sh:275: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
error: min-available and max-unavailable cannot be both specified
core.sh:281: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}:
pod/env-test-pod created
matched TEST_CMD_1
matched <set to the key 'key-1' in secret 'test-secret'>
matched TEST_CMD_2
matched <set to the key 'key-2' of config map 'test-configmap'>
... skipping 242 lines ...
core.sh:542: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: registry.k8s.io/pause:3.9:
Successful
message:kubectl-create kubectl-patch
has:kubectl-patch
pod/valid-pod patched
core.sh:562: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
+++ [0317 21:07:51] "kubectl patch with resourceVersion 623" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
pod "valid-pod" deleted
pod/valid-pod replaced
core.sh:586: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
Successful
message:kubectl-replace
has:kubectl-replace
Successful
message:error: --grace-period must have --force specified
has:\-\-grace-period must have \-\-force specified
Successful
message:error: --timeout must have --force specified
has:\-\-timeout must have \-\-force specified
I0317 21:07:52.939555 23124 actual_state_of_world.go:547] "Failed to update statusUpdateNeeded field in actual state of world" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"node-v1-test\" does not exist"
node/node-v1-test created
core.sh:614: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
node/node-v1-test replaced (server dry run)
I0317 21:07:53.349119 23124 event.go:307] "Event occurred" object="node-v1-test" fieldPath="" kind="Node" apiVersion="v1" type="Normal" reason="RegisteredNode" message="Node node-v1-test event: Registered Node node-v1-test in Controller"
node/node-v1-test replaced (dry run)
core.sh:639: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
... skipping 31 lines ...
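The "min-available and max-unavailable cannot be both specified" error above is kubectl's own flag validation for PodDisruptionBudgets: the two flags are mutually exclusive. For example (selector and names illustrative):

  kubectl create poddisruptionbudget test-pdb-3 --selector=app=rails --max-unavailable=2   # OK
  kubectl create poddisruptionbudget bad-pdb --selector=app=rails \
    --min-available=1 --max-unavailable=2
  # error: min-available and max-unavailable cannot be both specified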
spec:
  containers:
  - image: registry.k8s.io/pause:3.9
    name: kubernetes-pause
has:localonlyvalue
core.sh:691: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
error: 'name' already has a value (valid-pod), and --overwrite is false
core.sh:695: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
core.sh:699: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
pod/valid-pod labeled
core.sh:703: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
core.sh:707: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
... skipping 85 lines ...
+++ Running case: test-cmd.run_kubectl_create_error_tests
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_kubectl_create_error_tests
+++ [0317 21:08:04] Creating namespace namespace-1679087284-21870
namespace/namespace-1679087284-21870 created
Context "test" modified.
+++ [0317 21:08:04] Testing kubectl create with error
Error: must specify one of -f and -k

Create a resource from a file or from stdin.

JSON and YAML formats are accepted.

Examples:
... skipping 63 lines ...
	If true, keep the managedFields when printing objects in JSON or YAML format.

    --template='':
	Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].

    --validate='strict':
	Must be one of: strict (or true), warn, ignore (or false). "true" or "strict" will use a schema to validate the input and fail the request if invalid. It will perform server side validation if ServerSideFieldValidation is enabled on the api-server, but will fall back to less reliable client-side validation if not. "warn" will warn about unknown or duplicate fields without blocking the request if server-side field validation is enabled on the API server, and behave as "ignore" otherwise. "false" or "ignore" will not perform any schema validation, silently dropping any unknown or duplicate fields.

    --windows-line-endings=false:
	Only relevant if --edit=true. Defaults to the line ending native to your platform.

Usage:
  kubectl create -f FILENAME [options]
... skipping 38 lines ...
I0317 21:08:07.410047 23124 event.go:307] "Event occurred" object="namespace-1679087284-311/test-deployment-retainkeys-d65c44c97" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: test-deployment-retainkeys-d65c44c97-7n5jq"
deployment.apps "test-deployment-retainkeys" deleted
apply.sh:88: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
pod/selector-test-pod created
apply.sh:92: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
Successful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
apply.sh:101: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
pod/test-pod created (dry run)
pod/test-pod created (server dry run)
apply.sh:107: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
... skipping 31 lines ...
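The "'name' already has a value" error above is kubectl's guard against silently clobbering an existing label; passing --overwrite opts in explicitly, which is what the subsequent successful relabel does:

  kubectl label pod valid-pod name=valid-pod-super-sayan               # fails: 'name' already set
  kubectl label pod valid-pod name=valid-pod-super-sayan --overwrite   # succeeds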
pod/b created
apply.sh:207: Successful get pods a {{.metadata.name}}: a
apply.sh:208: Successful get pods b -n nsb {{.metadata.name}}: b
pod "a" deleted
pod "b" deleted
Successful
message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector
has:all resources selected for prune without explicitly passing --all
pod/a created
pod/b created
I0317 21:08:17.537148 20027 alloc.go:330] "allocated clusterIPs" service="namespace-1679087284-311/prune-svc" clusterIPs=map[IPv4:10.0.0.224]
service/prune-svc created
W0317 21:08:17.537917 32385 prune.go:71] Deprecated: kubectl apply will no longer prune non-namespaced resources by default when used with the --namespace flag in a future release. To preserve the current behaviour, list the resources you want to target explicitly in the --prune-allowlist flag.
... skipping 45 lines ...
pod/b unchanged
W0317 21:08:35.751443 32753 prune.go:71] Deprecated: kubectl apply will no longer prune non-namespaced resources by default when used with the --namespace flag in a future release. To preserve the current behaviour, list the resources you want to target explicitly in the --prune-allowlist flag.
pod/a pruned
apply.sh:265: Successful get pods -n nsb {{range.items}}{{.metadata.name}}:{{end}}: b:
namespace "nsb" deleted
Successful
message:error: the namespace from the provided object "nsb" does not match the namespace "foo". You must pass '--namespace=nsb' to perform this operation.
has:the namespace from the provided object "nsb" does not match the namespace "foo".
apply.sh:276: Successful get services {{range.items}}{{.metadata.name}}:{{end}}:
service/a created
apply.sh:280: Successful get services a {{.metadata.name}}: a
Successful
message:The Service "a" is invalid: spec.clusterIPs[0]: Invalid value: []string{"10.0.0.12"}: may not change once set
... skipping 28 lines ...
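The prune errors and deprecation warnings above come from kubectl apply's pruning mode, which deletes previously-applied objects that are absent from the current input. Pruning requires either a label selector or an explicit --all; representative invocations (paths and selector illustrative):

  kubectl apply -f manifests/ --prune -l app=mine      # prune objects matching the selector
  kubectl apply -f manifests/ --prune --all \
    --prune-allowlist=core/v1/Service                  # restrict which kinds may be pruned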
apply.sh:302: Successful get deployment test-the-deployment {{.metadata.name}}: test-the-deployment
apply.sh:303: Successful get service test-the-service {{.metadata.name}}: test-the-service
configmap "test-the-map" deleted
service "test-the-service" deleted
deployment.apps "test-the-deployment" deleted
Successful
message:Error from server (NotFound): namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
apply.sh:311: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:namespace/multi-resource-ns created
Error from server (NotFound): error when creating "hack/testdata/multi-resource-1.yaml": namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
Successful
message:Error from server (NotFound): pods "test-pod" not found
has:pods "test-pod" not found
pod/test-pod created
namespace/multi-resource-ns unchanged
apply.sh:319: Successful get pods test-pod -n multi-resource-ns {{.metadata.name}}: test-pod
pod "test-pod" deleted
namespace "multi-resource-ns" deleted
I0317 21:08:47.484941 23124 namespace_controller.go:182] "Namespace has been deleted" namespace="nsb"
apply.sh:325: Successful get configmaps --field-selector=metadata.name=foo {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:configmap/foo created
error: resource mapping not found for name: "foo" namespace: "" from "hack/testdata/multi-resource-2.yaml": no matches for kind "Bogus" in version "example.com/v1"
ensure CRDs are installed first
has:no matches for kind "Bogus" in version "example.com/v1"
apply.sh:331: Successful get configmaps foo {{.metadata.name}}: foo
configmap "foo" deleted
apply.sh:337: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
... skipping 6 lines ...
pod "pod-a" deleted
pod "pod-c" deleted
apply.sh:345: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
apply.sh:349: Successful get crds {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:customresourcedefinition.apiextensions.k8s.io/widgets.example.com created
error: resource mapping not found for name: "foo" namespace: "" from "hack/testdata/multi-resource-4.yaml": no matches for kind "Widget" in version "example.com/v1"
ensure CRDs are installed first
has:no matches for kind "Widget" in version "example.com/v1"
I0317 21:08:53.516418 20027 handler.go:165] Adding GroupVersion example.com v1 to ResourceManager
customresourcedefinition.apiextensions.k8s.io/widgets.example.com condition met
Successful
message:Error from server (NotFound): widgets.example.com "foo" not found
has:widgets.example.com "foo" not found
apply.sh:356: Successful get crds widgets.example.com {{.metadata.name}}: widgets.example.com
I0317 21:08:56.388038 20027 controller.go:624] quota admission added evaluator for: widgets.example.com
widget.example.com/foo created
customresourcedefinition.apiextensions.k8s.io/widgets.example.com unchanged
apply.sh:359: Successful get widget foo {{.metadata.name}}: foo
... skipping 34 lines ...
message:908
has:908
pod "test-pod" deleted
apply.sh:415: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
+++ [0317 21:08:59] Testing upgrade kubectl client-side apply to server-side apply
pod/test-pod created
error: Apply failed with 1 conflict: conflict with "kubectl-client-side-apply" using v1: .metadata.labels.name
Please review the fields above--they currently have other managers. Here
are the ways you can resolve this warning:
* If you intend to manage all of these fields, please re-run the apply
  command with the `--force-conflicts` flag.
* If you do not intend to manage all of the fields, please edit your
  manifest to remove references to the fields that should keep their
... skipping 153 lines ...
pod "nginx-extensions" deleted
Successful
message:pod/test1 created
has:pod/test1 created
pod "test1" deleted
Successful
message:error: Invalid image name "InvalidImageName": invalid reference format
has:error: Invalid image name "InvalidImageName": invalid reference format
+++ exit code: 0
Recording: run_kubectl_create_filter_tests
Running command: run_kubectl_create_filter_tests
+++ Running case: test-cmd.run_kubectl_create_filter_tests
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 3 lines ...
Context "test" modified.
+++ [0317 21:09:06] Testing kubectl create filter
create.sh:50: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
pod/selector-test-pod created
create.sh:54: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
Successful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
+++ exit code: 0
Recording: run_kubectl_apply_deployments_tests
Running command: run_kubectl_apply_deployments_tests
... skipping 18 lines ...
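The conflict above is server-side apply's field-manager protection: a field already owned by the "kubectl-client-side-apply" manager cannot be taken over silently. The resolutions the message lists map directly to flags or manifest edits; for example:

  kubectl apply --server-side -f pod.yaml                    # fails on conflicting fields
  kubectl apply --server-side --force-conflicts -f pod.yaml  # take ownership of the fields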
apps.sh:165: Successful get deployments my-depl {{.spec.template.metadata.labels.l1}}: l1
apps.sh:166: Successful get deployments my-depl {{.spec.selector.matchLabels.l1}}: l1
apps.sh:167: Successful get deployments my-depl {{.metadata.labels.l1}}: <no value>
deployment.apps "my-depl" deleted
replicaset.apps "my-depl-bfb57d6df" deleted
pod "my-depl-bfb57d6df-hmj8m" deleted
E0317 21:09:08.965657 23124 replica_set.go:544] sync "namespace-1679087347-32359/my-depl-bfb57d6df" failed with Operation cannot be fulfilled on replicasets.apps "my-depl-bfb57d6df": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1679087347-32359/my-depl-bfb57d6df, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 20377430-afaa-41e4-88fd-2b81931eb545, UID in object meta:
apps.sh:173: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}:
apps.sh:174: Successful get replicasets {{range.items}}{{.metadata.name}}:{{end}}:
apps.sh:175: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
apps.sh:179: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}:
deployment.apps/nginx created
I0317 21:09:09.571949 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-5645b79496 to 3"
I0317 21:09:09.585788 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5645b79496" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5645b79496-wljmw"
I0317 21:09:09.606068 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5645b79496" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5645b79496-h2kdf"
I0317 21:09:09.606344 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5645b79496" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5645b79496-drrvh"
apps.sh:183: Successful get deployment nginx {{.metadata.name}}: nginx
Successful
message:Error from server (Conflict): error when applying patch:
{"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1679087347-32359\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"registry.k8s.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
to:
Resource: "apps/v1, Resource=deployments", GroupVersionKind: "apps/v1, Kind=Deployment"
Name: "nginx", Namespace: "namespace-1679087347-32359"
for: "hack/testdata/deployment-label-change2.yaml": error when patching "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.apps "nginx": the object has been modified; please apply your changes to the latest version and try again
has:Error from server (Conflict)
deployment.apps/nginx configured
I0317 21:09:18.237147 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-5675dfc785 to 3"
I0317 21:09:18.258665 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5675dfc785" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5675dfc785-hwr65"
I0317 21:09:18.307791 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5675dfc785" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5675dfc785-clxpf"
I0317 21:09:18.307960 23124 event.go:307] "Event occurred" object="namespace-1679087347-32359/nginx-5675dfc785" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-5675dfc785-mzqqm"
Successful
... skipping 538 lines ...
+++ [0317 21:09:43] Creating namespace namespace-1679087383-27574
namespace/namespace-1679087383-27574 created
Context "test" modified.
+++ [0317 21:09:44] Testing kubectl get
get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:{
    "apiVersion": "v1",
    "items": [],
... skipping 21 lines ...
has not:No resources found
Successful
message:NAME
has not:No resources found
get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:error: the server doesn't have a resource type "foobar"
has not:No resources found
Successful
message:No resources found in namespace-1679087383-27574 namespace.
has:No resources found
Successful
message:
has not:No resources found
Successful
message:No resources found in namespace-1679087383-27574 namespace.
has:No resources found
get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
Successful
message:Error from server (NotFound): pods "abc" not found
has not:List
Successful
message:I0317 21:09:45.328673 35992 loader.go:373] Config loaded from file: /tmp/tmp.uL2QQvcwka/.kube/config
I0317 21:09:45.334012 35992 round_trippers.go:553] GET https://127.0.0.1:6443/version?timeout=32s 200 OK in 4 milliseconds
I0317 21:09:45.350700 35992 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/default/pods 200 OK in 1 milliseconds
I0317 21:09:45.352450 35992 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/default/replicationcontrollers 200 OK in 1 milliseconds
... skipping 597 lines ...
}
get.sh:158: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
<no value>Successful
message:valid-pod:
has:valid-pod:
Successful
message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found.
Printing more information for debugging the template:
	template was:
		{.missing}
	object given to jsonpath engine was:
		map[string]interface {}{"apiVersion":"v1", "kind":"Pod", "metadata":map[string]interface {}{"creationTimestamp":"2023-03-17T21:09:53Z", "labels":map[string]interface {}{"name":"valid-pod"}, "managedFields":[]interface {}{map[string]interface {}{"apiVersion":"v1", "fieldsType":"FieldsV1", "fieldsV1":map[string]interface {}{"f:metadata":map[string]interface {}{"f:labels":map[string]interface {}{".":map[string]interface {}{}, "f:name":map[string]interface {}{}}}, "f:spec":map[string]interface {}{"f:containers":map[string]interface {}{"k:{\"name\":\"kubernetes-serve-hostname\"}":map[string]interface {}{".":map[string]interface {}{}, "f:image":map[string]interface {}{}, "f:imagePullPolicy":map[string]interface {}{}, "f:name":map[string]interface {}{}, "f:resources":map[string]interface {}{".":map[string]interface {}{}, "f:limits":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}, "f:requests":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}}, "f:terminationMessagePath":map[string]interface {}{}, "f:terminationMessagePolicy":map[string]interface {}{}}}, "f:dnsPolicy":map[string]interface {}{}, "f:enableServiceLinks":map[string]interface {}{}, "f:restartPolicy":map[string]interface {}{}, "f:schedulerName":map[string]interface {}{}, "f:securityContext":map[string]interface {}{}, "f:terminationGracePeriodSeconds":map[string]interface {}{}}}, "manager":"kubectl-create", "operation":"Update", "time":"2023-03-17T21:09:53Z"}}, "name":"valid-pod", "namespace":"namespace-1679087392-21374", "resourceVersion":"1142", "uid":"51a846c8-44b8-4939-9db6-1fb8734db5ff"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"image":"registry.k8s.io/serve_hostname", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File"}}, "dnsPolicy":"ClusterFirst", "enableServiceLinks":true, "preemptionPolicy":"PreemptLowerPriority", "priority":0, "restartPolicy":"Always", "schedulerName":"default-scheduler", "securityContext":map[string]interface {}{}, "terminationGracePeriodSeconds":30}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
has:missing is not found
error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
Successful
message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing".
Printing more information for debugging the template:
	template was:
		{{.missing}}
	raw data was:
		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2023-03-17T21:09:53Z","labels":{"name":"valid-pod"},"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:name":{}}},"f:spec":{"f:containers":{"k:{\"name\":\"kubernetes-serve-hostname\"}":{".":{},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:resources":{".":{},"f:limits":{".":{},"f:cpu":{},"f:memory":{}},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:enableServiceLinks":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}},"manager":"kubectl-create","operation":"Update","time":"2023-03-17T21:09:53Z"}],"name":"valid-pod","namespace":"namespace-1679087392-21374","resourceVersion":"1142","uid":"51a846c8-44b8-4939-9db6-1fb8734db5ff"},"spec":{"containers":[{"image":"registry.k8s.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"preemptionPolicy":"PreemptLowerPriority","priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
	object given to template engine was:
		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2023-03-17T21:09:53Z labels:map[name:valid-pod] managedFields:[map[apiVersion:v1 fieldsType:FieldsV1 fieldsV1:map[f:metadata:map[f:labels:map[.:map[] f:name:map[]]] f:spec:map[f:containers:map[k:{"name":"kubernetes-serve-hostname"}:map[.:map[] f:image:map[] f:imagePullPolicy:map[] f:name:map[] f:resources:map[.:map[] f:limits:map[.:map[] f:cpu:map[] f:memory:map[]] f:requests:map[.:map[] f:cpu:map[] f:memory:map[]]] f:terminationMessagePath:map[] f:terminationMessagePolicy:map[]]] f:dnsPolicy:map[] f:enableServiceLinks:map[] f:restartPolicy:map[] f:schedulerName:map[] f:securityContext:map[] f:terminationGracePeriodSeconds:map[]]] manager:kubectl-create operation:Update time:2023-03-17T21:09:53Z]] name:valid-pod namespace:namespace-1679087392-21374 resourceVersion:1142 uid:51a846c8-44b8-4939-9db6-1fb8734db5ff] spec:map[containers:[map[image:registry.k8s.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true preemptionPolicy:PreemptLowerPriority priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30] status:map[phase:Pending qosClass:Guaranteed]]
has:map has no entry for key "missing"
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          0s
has:valid-pod
Successful
message:Error from server (NotFound): the server could not find the requested resource
has:the server could not find the requested resource
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          0s
has:STATUS
Successful
... skipping 78 lines ...
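The two failures above contrast kubectl's jsonpath and go-template output engines on a missing key: both error out, but with different messages, which is exactly what the test asserts. For example (pod name illustrative):

  kubectl get pod valid-pod -o jsonpath='{.missing}'        # error: missing is not found
  kubectl get pod valid-pod -o go-template='{{.missing}}'   # error: map has no entry for key "missing"
  kubectl get pod valid-pod -o jsonpath='{.metadata.name}'  # prints: valid-pod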
  terminationGracePeriodSeconds: 30
status:
  phase: Pending
  qosClass: Guaranteed
has:name: valid-pod
Successful
message:Error from server (NotFound): pods "invalid-pod" not found
has:"invalid-pod" not found
pod "valid-pod" deleted
get.sh:204: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
pod/redis-master created
pod/valid-pod created
Successful
... skipping 1136 lines ...
+++ [0317 21:10:07] Creating namespace namespace-1679087407-6698
namespace/namespace-1679087407-6698 created
Context "test" modified.
+++ [0317 21:10:07] Testing kubectl exec POD COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
Successful
message:error: cannot exec into multiple objects at a time
has:cannot exec into multiple objects at a time
pod/test-pod created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pods "test-pod" not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod or type/name must be specified
pod "test-pod" deleted
+++ exit code: 0
Recording: run_kubectl_exec_resource_name_tests
Running command: run_kubectl_exec_resource_name_tests
... skipping 3 lines ...
+++ [0317 21:10:08] Creating namespace namespace-1679087408-29950
namespace/namespace-1679087408-29950 created
Context "test" modified.
+++ [0317 21:10:08] Testing kubectl exec TYPE/NAME COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: the server doesn't have a resource type "foo"
has:error:
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): deployments.apps "bar" not found
has:"bar" not found
pod/test-pod created
replicaset.apps/frontend created
I0317 21:10:09.310600 23124 event.go:307] "Event occurred" object="namespace-1679087408-29950/frontend" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-jpvx4"
I0317 21:10:09.333568 23124 event.go:307] "Event occurred" object="namespace-1679087408-29950/frontend" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-9fr7g"
I0317 21:10:09.333612 23124 event.go:307] "Event occurred" object="namespace-1679087408-29950/frontend" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-n2gzx"
configmap/test-set-env-config created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: cannot attach to *v1.ConfigMap: selector for *v1.ConfigMap not implemented
has:not implemented
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod, type/name or --filename must be specified
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-9fr7g does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-9fr7g does not have a host assigned
has not:pod, type/name or --filename must be specified
pod "test-pod" deleted
replicaset.apps "frontend" deleted
configmap "test-set-env-config" deleted
+++ exit code: 0
Recording: run_create_secret_tests
Running command: run_create_secret_tests
+++ Running case: test-cmd.run_create_secret_tests
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_create_secret_tests
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
message:user-specified
has:user-specified
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"2c682f08-66b6-4064-ab8e-38e501ad7a92","resourceVersion":"1243","creationTimestamp":"2023-03-17T21:10:10Z"}}
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"2c682f08-66b6-4064-ab8e-38e501ad7a92","resourceVersion":"1244","creationTimestamp":"2023-03-17T21:10:10Z"},"data":{"key1":"config1"}}
has:uid
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"2c682f08-66b6-4064-ab8e-38e501ad7a92","resourceVersion":"1244","creationTimestamp":"2023-03-17T21:10:10Z"},"data":{"key1":"config1"}}
has:config1
{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Success","details":{"name":"tester-update-cm","kind":"configmaps","uid":"2c682f08-66b6-4064-ab8e-38e501ad7a92"}}
Successful
message:Error from server (NotFound): configmaps "tester-update-cm" not found
has:configmaps "tester-update-cm" not found
+++ exit code: 0
Recording: run_kubectl_create_kustomization_directory_tests
Running command: run_kubectl_create_kustomization_directory_tests
+++ Running case: test-cmd.run_kubectl_create_kustomization_directory_tests
... skipping 25 lines ...
+++ command: run_kubectl_create_validate_tests
+++ [0317 21:10:12] Creating namespace namespace-1679087412-14948
namespace/namespace-1679087412-14948 created
Context "test" modified.
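The repeated warning in the exec runs above concerns the positional-command form of kubectl exec; the form with a -- separator is its replacement, and everything after -- is passed to the container verbatim. For example (pod name illustrative):

  kubectl exec test-pod date      # deprecated form, still works for now
  kubectl exec test-pod -- date   # preferred: everything after -- is the command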
+++ [0317 21:10:12] Testing kubectl create --validate
Successful
message:Error from server (BadRequest): error when creating "hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml": Deployment in version "v1" cannot be handled as a Deployment: strict decoding error: unknown field "spec.baz", unknown field "spec.foo"
has either:strict decoding error
or:error validating data
+++ [0317 21:10:12] Testing kubectl create --validate=true
Successful
message:Error from server (BadRequest): error when creating "hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml": Deployment in version "v1" cannot be handled as a Deployment: strict decoding error: unknown field "spec.baz", unknown field "spec.foo"
has either:strict decoding error
or:error validating data
+++ [0317 21:10:12] Testing kubectl create --validate=false
I0317 21:10:12.688498 23124 namespace_controller.go:182] "Namespace has been deleted" namespace="test-events"
Successful
message:deployment.apps/invalid-nginx-deployment created
has:deployment.apps/invalid-nginx-deployment created
I0317 21:10:12.730687 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set invalid-nginx-deployment-cbdccf466 to 4"
I0317 21:10:12.767134 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-n2cwh"
deployment.apps "invalid-nginx-deployment" deleted
I0317 21:10:12.790787 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-m7z8p"
I0317 21:10:12.790821 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-sjwv9"
+++ [0317 21:10:12] Testing kubectl create --validate=strict
I0317 21:10:12.813069 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-wl9lr"
E0317 21:10:12.826163 23124 replica_set.go:544] sync "namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" failed with replicasets.apps "invalid-nginx-deployment-cbdccf466" not found
Successful
message:Error from server (BadRequest): error when creating "hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml": Deployment in version "v1" cannot be handled as a Deployment: strict decoding error: unknown field "spec.baz", unknown field "spec.foo"
has either:strict decoding error
or:error validating data
+++ [0317 21:10:13] Testing kubectl create --validate=warn
Warning: unknown field "spec.baz"
Warning: unknown field "spec.foo"
Successful
message:deployment.apps/invalid-nginx-deployment created
has:deployment.apps/invalid-nginx-deployment created
... skipping 11 lines ...
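The runs above walk through kubectl's --validate modes against the same manifest with unknown fields; summarized (manifest path taken from the log):

  kubectl create -f bad.yaml --validate=strict   # reject unknown/duplicate fields (the default)
  kubectl create -f bad.yaml --validate=warn     # warn about unknown fields but create the object
  kubectl create -f bad.yaml --validate=ignore   # silently drop unknown fields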
I0317 21:10:13.509183 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-45t9n"
I0317 21:10:13.551377 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-ntk9p"
I0317 21:10:13.551413 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-tr5s8"
deployment.apps "invalid-nginx-deployment" deleted
I0317 21:10:13.573079 23124 event.go:307] "Event occurred" object="namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: invalid-nginx-deployment-cbdccf466-ddn5p"
+++ [0317 21:10:13] Testing kubectl create
E0317 21:10:13.607573 23124 replica_set.go:544] sync "namespace-1679087412-14948/invalid-nginx-deployment-cbdccf466" failed with replicasets.apps "invalid-nginx-deployment-cbdccf466" not found
Successful
message:Error from server (BadRequest): error when creating "hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml": Deployment in version "v1" cannot be handled as a Deployment: strict decoding error: unknown field "spec.baz", unknown field "spec.foo"
has either:strict decoding error or:error validating data
+++ [0317 21:10:13] Testing kubectl create --validate=foo
Successful
message:error: invalid - validate option "foo"; must be one of: strict (or true), warn, ignore (or false)
has:invalid - validate option "foo"
+++ exit code: 0
Recording: run_convert_tests
Running command: run_convert_tests
+++ Running case: test-cmd.run_convert_tests
... skipping 50 lines ...
  securityContext: {}
  terminationGracePeriodSeconds: 30
status: {}
has:apps/v1beta1
deployment.apps "nginx" deleted
Successful
message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
Successful
message:nginx:
has:nginx:
+++ exit code: 0
Recording: run_kubectl_delete_allnamespaces_tests
... skipping 103 lines ...
has:Timeout
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          1s
has:valid-pod
Successful
message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
has:Invalid timeout value
pod "valid-pod" deleted
+++ exit code: 0
Recording: run_crd_tests
Running command: run_crd_tests
... skipping 157 lines ...
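The timeout failure above comes from kubectl's request timeout, which accepts a bare integer (seconds) or an integer with a unit; a sketch using the valid-pod fixture:

  kubectl get pod valid-pod --request-timeout=1
  kubectl get pod valid-pod --request-timeout=2m
  kubectl get pod valid-pod --request-timeout=5x   # rejected: Invalid timeout value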
Flag --record has been deprecated, --record will be removed in the future
foo.company.com/test patched
crd.sh:296: Successful get foos/test {{.patched}}: value2
Flag --record has been deprecated, --record will be removed in the future
foo.company.com/test patched
crd.sh:298: Successful get foos/test {{.patched}}: <no value>
+++ [0317 21:10:24] "kubectl patch --local" returns error as expected for CustomResource: error: strategic merge patch is not supported for company.com/v1, Kind=Foo locally, try --type merge
{
    "apiVersion": "company.com/v1",
    "kind": "Foo",
    "metadata": {
        "annotations": {
            "kubernetes.io/change-cause": "kubectl patch foos/test --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 228 lines ...
crd.sh:519: Successful get bars {{range.items}}{{.metadata.name}}:{{end}}:
namespace/non-native-resources created
bar.company.com/test created
crd.sh:524: Successful get bars {{len .items}}: 1
namespace "non-native-resources" deleted
crd.sh:527: Successful get bars {{len .items}}: 0
Error from server (NotFound): namespaces "non-native-resources" not found
I0317 21:10:40.736221 20027 handler.go:165] Adding GroupVersion company.com v1 to ResourceManager
customresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
I0317 21:10:40.750658 20027 handler.go:165] Adding GroupVersion company.com v1 to ResourceManager
I0317 21:10:40.779159 20027 handler.go:165] Adding GroupVersion company.com v1 to ResourceManager
I0317 21:10:40.920947 20027 handler.go:165] Adding GroupVersion company.com v1 to ResourceManager
customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
... skipping 15 lines ...
+++ [0317 21:10:41] Testing recursive resources
+++ [0317 21:10:41] Creating namespace namespace-1679087441-6980
namespace/namespace-1679087441-6980 created
Context "test" modified.
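As the log notes, strategic merge patch needs a schema that custom resources do not carry, so CRs are patched with a JSON merge patch instead; a minimal sketch against the Foo fixture:

  # the default strategic merge fails for company.com/v1, Kind=Foo; merge works
  kubectl patch foos/test --type=merge -p '{"patched":"value2"}'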
generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
W0317 21:10:41.779995 20027 cacher.go:171] Terminating all watchers from cacher foos.company.com
E0317 21:10:41.781572 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W0317 21:10:41.951403 20027 cacher.go:171] Terminating all watchers from cacher bars.company.com
E0317 21:10:41.953019 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:pod/busybox0 created
pod/busybox1 created
error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
W0317 21:10:42.156372 20027 cacher.go:171] Terminating all watchers from cacher resources.mygroup.example.com
E0317 21:10:42.157888 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:220: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
Successful
message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
W0317 21:10:42.313980 20027 cacher.go:171] Terminating all watchers from cacher validfoos.company.com
E0317 21:10:42.315568 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:227: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
W0317 21:10:43.047619 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:43.047678 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0317 21:10:43.068266 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:43.068336 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:231: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
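The busybox0/busybox1 fixtures above are created by walking a directory tree; a sketch of the recursive form, assuming the hack/testdata layout used in these tests:

  # -R/--recursive processes every manifest under the directory; the broken
  # file is reported while the valid ones are still created
  kubectl create -f hack/testdata/recursive/pod -R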
Successful
message:pod/busybox0 replaced
pod/busybox1 replaced
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:236: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:Name:         busybox0
Namespace:    namespace-1679087441-6980
Priority:     0
Node:         <none>
... skipping 154 lines ...
QoS Class:        BestEffort
Node-Selectors:   <none>
Tolerations:      <none>
Events:           <none>
unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
W0317 21:10:43.327914 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:43.327955 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:246: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:250: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
Successful
message:pod/busybox0 annotate
pod/busybox1 annotate
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:255: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
W0317 21:10:43.833998 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:43.834042 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:259: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
Successful
message:Warning: resource pods/busybox0 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
pod/busybox0 configured
Warning: resource pods/busybox1 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
pod/busybox1 configured
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:264: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:busybox0:busybox1:
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:273: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
pod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:278: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
Successful
message:pod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:283: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
pod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:288: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
Successful
message:pod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:293: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
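The repeated apply warning above is avoided by recording the last-applied-configuration at creation time; a minimal sketch with a hypothetical manifest:

  # --save-config stores kubectl.kubernetes.io/last-applied-configuration,
  # so a later kubectl apply does not have to patch the annotation in
  kubectl create -f pod.yaml --save-config
  kubectl apply -f pod.yaml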
W0317 21:10:44.832588 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:44.832629 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:297: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
Successful
message:Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "busybox0" force deleted
pod "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:302: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}:
replicationcontroller/busybox0 created
I0317 21:10:45.388076 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox0" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-hzqpf"
replicationcontroller/busybox1 created
error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0317 21:10:45.437614 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox1" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-26r6f"
I0317 21:10:45.479041 23124 namespace_controller.go:182] "Namespace has been deleted" namespace="non-native-resources"
generic-resources.sh:306: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
W0317 21:10:45.535765 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:45.535811 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:311: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:312: Successful get rc busybox0 {{.spec.replicas}}: 1
generic-resources.sh:313: Successful get rc busybox1 {{.spec.replicas}}: 1
generic-resources.sh:318: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{(index .spec.metrics 0).resource.target.averageUtilization}}: 1 2 80
generic-resources.sh:319: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{(index .spec.metrics 0).resource.target.averageUtilization}}: 1 2 80
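The HPA assertions (min 1, max 2, target utilization 80) correspond to an autoscale call along these lines:

  kubectl autoscale rc busybox0 --min=1 --max=2 --cpu-percent=80
  kubectl autoscale rc busybox1 --min=1 --max=2 --cpu-percent=80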
'{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' has:Object 'Kind' is missing horizontalpodautoscaler.autoscaling "busybox0" deleted horizontalpodautoscaler.autoscaling "busybox1" deleted W0317 21:10:46.173211 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:46.173255 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource [32mgeneric-resources.sh:327: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1: (B[m[32mgeneric-resources.sh:328: Successful get rc busybox0 {{.spec.replicas}}: 1 (B[m[32mgeneric-resources.sh:329: Successful get rc busybox1 {{.spec.replicas}}: 1 (B[mW0317 21:10:46.447915 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:46.447954 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource I0317 21:10:46.510649 20027 alloc.go:330] "allocated clusterIPs" service="namespace-1679087441-6980/busybox0" clusterIPs=map[IPv4:10.0.0.140] I0317 21:10:46.557697 20027 alloc.go:330] "allocated clusterIPs" service="namespace-1679087441-6980/busybox1" clusterIPs=map[IPv4:10.0.0.61] [32mgeneric-resources.sh:333: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80 (B[m[32mgeneric-resources.sh:334: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80 (B[m[32mSuccessful (B[mmessage:service/busybox0 exposed service/busybox1 exposed error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' has:Object 'Kind' is missing [32mgeneric-resources.sh:340: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1: (B[m[32mgeneric-resources.sh:341: Successful get rc busybox0 {{.spec.replicas}}: 1 (B[m[32mgeneric-resources.sh:342: Successful get rc busybox1 {{.spec.replicas}}: 1 (B[mI0317 21:10:47.074864 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox0" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-gwkm2" I0317 21:10:47.100217 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox1" fieldPath="" kind="ReplicationController" apiVersion="v1" 
type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-fcdzw" [32mgeneric-resources.sh:346: Successful get rc busybox0 {{.spec.replicas}}: 2 (B[m[32mgeneric-resources.sh:347: Successful get rc busybox1 {{.spec.replicas}}: 2 (B[m[32mSuccessful (B[mmessage:replicationcontroller/busybox0 scaled replicationcontroller/busybox1 scaled error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' has:Object 'Kind' is missing [32mgeneric-resources.sh:352: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1: (B[m[32mgeneric-resources.sh:356: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: (B[m[32mSuccessful (B[mmessage:Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. replicationcontroller "busybox0" force deleted replicationcontroller "busybox1" force deleted error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' has:Object 'Kind' is missing [32mgeneric-resources.sh:361: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: (B[mdeployment.apps/nginx1-deployment created I0317 21:10:47.808536 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx1-deployment" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx1-deployment-69c599568 to 2" deployment.apps/nginx0-deployment created error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false I0317 21:10:47.830463 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx1-deployment-69c599568" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-69c599568-26l76" I0317 21:10:47.871416 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx1-deployment-69c599568" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-69c599568-dntcz" I0317 21:10:47.871547 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx0-deployment" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx0-deployment-5944978c6f to 2" I0317 21:10:47.886826 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx0-deployment-5944978c6f" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" 
reason="SuccessfulCreate" message="Created pod: nginx0-deployment-5944978c6f-7p6gz" I0317 21:10:47.909412 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/nginx0-deployment-5944978c6f" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx0-deployment-5944978c6f-746lf" [32mgeneric-resources.sh:365: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment: (B[m[32mgeneric-resources.sh:366: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: registry.k8s.io/nginx:1.7.9:registry.k8s.io/nginx:1.7.9: (B[m[32mgeneric-resources.sh:370: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: registry.k8s.io/nginx:1.7.9:registry.k8s.io/nginx:1.7.9: (B[m[32mSuccessful (B[mmessage:deployment.apps/nginx1-deployment skipped rollback (current template already matches revision 1) deployment.apps/nginx0-deployment skipped rollback (current template already matches revision 1) error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' has:Object 'Kind' is missing deployment.apps/nginx1-deployment paused deployment.apps/nginx0-deployment paused [32mgeneric-resources.sh:378: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true: (B[m[32mSuccessful (B[mmessage:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' has:Object 'Kind' is missing deployment.apps/nginx1-deployment resumed deployment.apps/nginx0-deployment resumed [32mgeneric-resources.sh:384: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>: (B[m[32mSuccessful (B[mmessage:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' has:Object 'Kind' is missing W0317 21:10:49.540199 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:49.540238 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource [32mSuccessful (B[mmessage:Waiting for deployment 
"nginx1-deployment" rollout to finish: 0 of 2 updated replicas are available... timed out waiting for the condition unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' has:Waiting for deployment "nginx1-deployment" rollout to finish [32mSuccessful (B[mmessage:Waiting for deployment "nginx1-deployment" rollout to finish: 0 of 2 updated replicas are available... timed out waiting for the condition unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' has:Object 'Kind' is missing W0317 21:10:49.881076 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:49.881115 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource W0317 21:10:50.433060 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:50.433118 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource [32mSuccessful (B[mmessage:Waiting for deployment "nginx1-deployment" rollout to finish: 0 of 2 updated replicas are available... Waiting for deployment "nginx0-deployment" rollout to finish: 0 of 2 updated replicas are available... timed out waiting for the condition timed out waiting for the condition unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' ... skipping 18 lines ... 
1         <none>
deployment.apps/nginx0-deployment
REVISION  CHANGE-CAUSE
1         <none>
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx0-deployment
Successful
message:deployment.apps/nginx1-deployment
REVISION  CHANGE-CAUSE
1         <none>
deployment.apps/nginx0-deployment
REVISION  CHANGE-CAUSE
1         <none>
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx1-deployment
Successful
message:deployment.apps/nginx1-deployment
REVISION  CHANGE-CAUSE
1         <none>
deployment.apps/nginx0-deployment
REVISION  CHANGE-CAUSE
1         <none>
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
deployment.apps "nginx1-deployment" force deleted deployment.apps "nginx0-deployment" force deleted error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"registry.k8s.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}' W0317 21:10:52.077554 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:10:52.077614 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource [32mgeneric-resources.sh:411: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: (B[mreplicationcontroller/busybox0 created I0317 21:10:53.315891 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox0" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-t7fss" replicationcontroller/busybox1 created error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false I0317 21:10:53.396309 23124 event.go:307] "Event occurred" object="namespace-1679087441-6980/busybox1" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-lmhdr" [32mgeneric-resources.sh:415: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1: (B[m[32mSuccessful (B[mmessage:no rollbacker has been implemented for "ReplicationController" no rollbacker has been implemented for "ReplicationController" unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' ... skipping 2 lines ... 
message:no rollbacker has been implemented for "ReplicationController"
no rollbacker has been implemented for "ReplicationController"
unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox0" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox1" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:Object 'Kind' is missing
'{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' error: replicationcontrollers "busybox0" resuming is not supported error: replicationcontrollers "busybox1" resuming is not supported has:replicationcontrollers "busybox0" resuming is not supported [32mSuccessful (B[mmessage:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' error: replicationcontrollers "busybox0" resuming is not supported error: replicationcontrollers "busybox1" resuming is not supported has:replicationcontrollers "busybox1" resuming is not supported Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. replicationcontroller "busybox0" force deleted replicationcontroller "busybox1" force deleted error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}' +++ exit code: 0 Recording: run_namespace_tests Running command: run_namespace_tests +++ Running case: test-cmd.run_namespace_tests +++ working dir: /home/prow/go/src/k8s.io/kubernetes +++ command: run_namespace_tests +++ [0317 21:10:54] Testing kubectl(v1:namespaces) [32mSuccessful (B[mmessage:Error from server (NotFound): namespaces "my-namespace" not found has: not found namespace/my-namespace created (dry run) namespace/my-namespace created (server dry run) [32mSuccessful (B[mmessage:Error from server (NotFound): namespaces "my-namespace" not found has: not found namespace/my-namespace created [32mcore.sh:1504: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace (B[mquery for namespaces had limit param query for resourcequotas had limit param query for limitranges had limit param ... skipping 132 lines ... 
I0317 21:10:55.615780 41999 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1679087420-29112/resourcequotas?limit=500 200 OK in 1 milliseconds
I0317 21:10:55.617235 41999 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1679087420-29112/limitranges?limit=500 200 OK in 1 milliseconds
I0317 21:10:55.619029 41999 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1679087441-6980 200 OK in 1 milliseconds
I0317 21:10:55.620487 41999 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1679087441-6980/resourcequotas?limit=500 200 OK in 1 milliseconds
I0317 21:10:55.621900 41999 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1679087441-6980/limitranges?limit=500 200 OK in 1 milliseconds
namespace "my-namespace" deleted
W0317 21:10:58.365472 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:10:58.365514 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0317 21:10:58.814440 23124 shared_informer.go:311] Waiting for caches to sync for resource quota
I0317 21:10:58.814482 23124 shared_informer.go:318] Caches are synced for resource quota
I0317 21:10:59.037288 23124 shared_informer.go:311] Waiting for caches to sync for garbage collector
I0317 21:10:59.037334 23124 shared_informer.go:318] Caches are synced for garbage collector
W0317 21:11:00.340743 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:00.340786 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0317 21:11:00.829479 23124 horizontal.go:512] "Horizontal Pod Autoscaler has been deleted" HPA="namespace-1679087441-6980/busybox0"
I0317 21:11:00.842981 23124 horizontal.go:512] "Horizontal Pod Autoscaler has been deleted" HPA="namespace-1679087441-6980/busybox1"
W0317 21:11:00.844048 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:00.844089 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace/my-namespace condition met
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created
core.sh:1515: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
Successful
message:Warning: deleting cluster-scoped resources, not scoped to the provided namespace
namespace "kube-node-lease" deleted
... skipping 34 lines ...
namespace "namespace-1679087414-13507" deleted namespace "namespace-1679087414-27621" deleted namespace "namespace-1679087416-3671" deleted namespace "namespace-1679087418-22363" deleted namespace "namespace-1679087420-29112" deleted namespace "namespace-1679087441-6980" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted has:Warning: deleting cluster-scoped resources [32mSuccessful (B[mmessage:Warning: deleting cluster-scoped resources, not scoped to the provided namespace namespace "kube-node-lease" deleted namespace "my-namespace" deleted namespace "namespace-1679087220-8973" deleted ... skipping 32 lines ... namespace "namespace-1679087414-13507" deleted namespace "namespace-1679087414-27621" deleted namespace "namespace-1679087416-3671" deleted namespace "namespace-1679087418-22363" deleted namespace "namespace-1679087420-29112" deleted namespace "namespace-1679087441-6980" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted has:namespace "my-namespace" deleted namespace/quotas created [32mcore.sh:1522: Successful get namespaces/quotas {{.metadata.name}}: quotas (B[m[32mcore.sh:1523: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name "test-quota" }}found{{end}}{{end}}:: : (B[mresourcequota/test-quota created (dry run) resourcequota/test-quota created (server dry run) [32mcore.sh:1527: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name "test-quota" }}found{{end}}{{end}}:: : (B[mresourcequota/test-quota created W0317 21:11:02.851533 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource E0317 21:11:02.851573 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource [32mcore.sh:1530: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name "test-quota" }}found{{end}}{{end}}:: found: (B[mquery for resourcequotas had limit param query for resourcequotas had user-specified limit param [32mSuccessful describe resourcequotas verbose logs: I0317 21:11:02.922223 42203 loader.go:373] Config loaded from file: /tmp/tmp.uL2QQvcwka/.kube/config I0317 21:11:02.927748 42203 round_trippers.go:553] GET https://127.0.0.1:6443/version?timeout=32s 200 OK in 5 milliseconds ... skipping 7 lines ... 
core.sh:1548: Successful get namespaces/other {{.metadata.name}}: other
core.sh:1552: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}:
pod/valid-pod created
core.sh:1556: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
core.sh:1558: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
Successful
message:error: a resource cannot be retrieved by name across all namespaces
has:a resource cannot be retrieved by name across all namespaces
core.sh:1565: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
core.sh:1569: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}:
namespace "other" deleted
... skipping 103 lines ...
I0317 21:11:17.389726 42627 round_trippers.go:553] GET https://127.0.0.1:6443/api/v1/namespaces/test-secrets/secrets/test-secret 200 OK in 1 milliseconds
secret "test-secret" deleted
core.sh:856: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}:
secret/test-secret created
core.sh:860: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
core.sh:861: Successful get secret/test-secret --namespace=test-secrets {{.type}}: kubernetes.io/dockerconfigjson
W0317 21:11:17.983224 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:17.983273 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
secret "test-secret" deleted
core.sh:871: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}:
secret/test-secret created
core.sh:875: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
core.sh:876: Successful get secret/test-secret --namespace=test-secrets {{.type}}: kubernetes.io/dockerconfigjson
secret "test-secret" deleted
... skipping 11 lines ...
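The kubernetes.io/dockerconfigjson type asserted above is what the docker-registry secret helper produces; a sketch with placeholder credentials:

  kubectl create secret docker-registry test-secret --namespace=test-secrets \
    --docker-username=user --docker-password=pass --docker-email=user@example.com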
core.sh:920: Successful get secret/secret-string-data --namespace=test-secrets {{.data}}: map[k1:djE= k2:djI=]
core.sh:921: Successful get secret/secret-string-data --namespace=test-secrets {{.stringData}}: <no value>
secret "secret-string-data" deleted
core.sh:930: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}:
secret "test-secret" deleted
namespace "test-secrets" deleted
W0317 21:11:21.335789 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:21.335830 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0317 21:11:21.437832 23124 namespace_controller.go:182] "Namespace has been deleted" namespace="other"
W0317 21:11:22.212568 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:22.212653 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W0317 21:11:24.721831 23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:24.721871 23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_configmap_tests
Running command: run_configmap_tests
+++ Running case: test-cmd.run_configmap_tests
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 43 lines ...
+++ command: run_client_config_tests
+++ [0317 21:11:32] Creating namespace namespace-1679087492-28814
namespace/namespace-1679087492-28814 created
Context "test" modified.
+++ [0317 21:11:32] Testing client config
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:Error in configuration: context was not found for specified context: missing-context
has:context was not found for specified context: missing-context
Successful
message:error: no server found for cluster "missing-cluster"
has:no server found for cluster "missing-cluster"
Successful
message:error: auth info "missing-user" does not exist
has:auth info "missing-user" does not exist
Successful
message:error: error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "vendor/k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
has:error loading config file
Successful
message:error: stat missing-config: no such file or directory
has:no such file or directory
+++ exit code: 0
Recording: run_service_accounts_tests
Running command: run_service_accounts_tests
+++ Running case: test-cmd.run_service_accounts_tests
... skipping 57 lines ...
Labels:                        <none>
Annotations:                   <none>
Schedule:                      59 23 31 2 *
Concurrency Policy:            Allow
Suspend:                       False
Successful Job History Limit:  3
Failed Job History Limit:      1
Starting Deadline Seconds:     <unset>
Selector:                      <unset>
Parallelism:                   <unset>
Completions:                   <unset>
Pod Template:
  Labels:  <none>
... skipping 57 lines ...
Annotations:      batch.kubernetes.io/job-tracking:
                  cronjob.kubernetes.io/instantiate: manual
Parallelism:      1
Completions:      1
Completion Mode:  NonIndexed
Start Time:       Fri, 17 Mar 2023 21:11:41 +0000
Pods Statuses:    1 Active (0 Ready) / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  batch.kubernetes.io/controller-uid=75366a7a-7a8d-41a1-993c-59834b808769
           batch.kubernetes.io/job-name=test-job
           controller-uid=75366a7a-7a8d-41a1-993c-59834b808769
           job-name=test-job
  Containers:
... skipping 466 lines ...
  type: ClusterIP
status:
  loadBalancer: {}
Successful
message:kubectl-create kubectl-set
has:kubectl-set
error: you must specify resources by --filename when --local is set.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
core.sh:1034: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
service/redis-master selector updated
Successful
message:Error from server (Conflict): Operation cannot be fulfilled on services "redis-master": the object has been modified; please apply your changes to the latest version and try again
has:Conflict
core.sh:1047: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0317 21:11:51.921060 23124 namespace_controller.go:182] "Namespace has been deleted" namespace="test-jobs"
service "redis-master" deleted
core.sh:1054: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
core.sh:1058: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
... skipping 6 lines ...
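The "--filename when --local is set" error above comes from running a set subcommand locally without a file to operate on; a sketch, with a hypothetical service manifest:

  # --local needs the object from a file, not from the server
  kubectl set selector -f redis-master-service.yaml role=padawan --local -o yaml
  # against the live object, by type/name, the selector is replaced as a whole
  kubectl set selector service redis-master app=redis,role=master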
core.sh:1087: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
service/service-v1-test replaced
core.sh:1094: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
service "redis-master" deleted
service "service-v1-test" deleted
core.sh:1102: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
W0317 21:11:53.550572   23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:53.550626   23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1106: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I0317 21:11:53.816824   20027 alloc.go:330] "allocated clusterIPs" service="default/redis-master" clusterIPs=map[IPv4:10.0.0.123]
service/redis-master created
I0317 21:11:54.078804   20027 alloc.go:330] "allocated clusterIPs" service="default/redis-slave" clusterIPs=map[IPv4:10.0.0.208]
service/redis-slave created
W0317 21:11:54.151175   23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:11:54.151218   23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1111: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:redis-slave:
Successful
message:NAME           RSRC
kubernetes     191
redis-master   2154
redis-slave    2158
... skipping 282 lines ...
message:daemonset.apps/bind
REVISION  CHANGE-CAUSE
2         kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true
3         kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true
has:3 kubectl apply
Successful
message:error: unable to find specified revision 1000000 in history
has:unable to find specified revision
apps.sh:122: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: registry.k8s.io/pause:2.0:
apps.sh:123: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
daemonset.apps/bind rolled back
apps.sh:126: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: registry.k8s.io/pause:latest:
apps.sh:127: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: registry.k8s.io/nginx:test-cmd:
... skipping 32 lines ...
+++ command: run_rc_tests
+++ [0317 21:12:01] Creating namespace namespace-1679087521-20113
namespace/namespace-1679087521-20113 created
Context "test" modified.
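Note: the daemonset block above is a rollout-history round trip: revisions 2 and 3 are listed with their recorded change causes, an undo to a nonexistent revision (1000000) is rejected, and a rollback restores the previous pod template (pause:2.0 back to pause:latest plus nginx:test-cmd). A minimal sketch of the same flow, using the daemonset name from the log:

  kubectl rollout history daemonset/bind
  kubectl rollout undo daemonset/bind --to-revision=1000000   # error: unable to find specified revision
  kubectl rollout undo daemonset/bind                         # rolls back to the previous revision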
+++ [0317 21:12:01] Testing kubectl(v1:replicationcontrollers)
core.sh:1205: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}:
W0317 21:12:02.039389   23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:12:02.039430   23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
replicationcontroller/frontend created
I0317 21:12:02.124497   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-7n2s6"
I0317 21:12:02.152033   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-59sbc"
I0317 21:12:02.152092   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-6v2c7"
E0317 21:12:02.245632   23124 replica_set.go:544] sync "namespace-1679087521-20113/frontend" failed with replicationcontrollers "frontend" not found
replicationcontroller "frontend" deleted
core.sh:1210: Successful get pods -l name=frontend {{range.items}}{{.metadata.name}}:{{end}}:
core.sh:1214: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}:
replicationcontroller/frontend created
I0317 21:12:02.683498   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-5wffn"
I0317 21:12:02.706563   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-hw7mp"
... skipping 12 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 27 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
Namespace:    namespace-1679087521-20113
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 25 lines ...
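Note: the repeated ReplicationController summaries above are kubectl describe output; the suite describes the same rc several times to cover the different describe code paths (for example with and without the trailing event list). A minimal sketch, with the namespace and rc name from the log (--show-events is a standard describe flag, not necessarily the harness's exact invocation):

  kubectl describe rc frontend --namespace=namespace-1679087521-20113
  kubectl describe rc frontend --namespace=namespace-1679087521-20113 --show-events=false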
core.sh:1240: Successful get rc frontend {{.spec.replicas}}: 3
replicationcontroller/frontend scaled
E0317 21:12:03.769282   23124 replica_set.go:220] ReplicaSet has no controller: &ReplicaSet{ObjectMeta:{frontend namespace-1679087521-20113 b8953cf3-28a1-42b6-b8f2-735913043418 2270 2 2023-03-17 21:12:02 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] [] [{kubectl Update v1 <nil> FieldsV1 {"f:spec":{"f:replicas":{}}} scale} {kube-controller-manager Update v1 2023-03-17 21:12:02 +0000 UTC FieldsV1 {"f:status":{"f:fullyLabeledReplicas":{},"f:observedGeneration":{},"f:replicas":{}}} status} {kubectl-create Update v1 2023-03-17 21:12:02 +0000 UTC FieldsV1 {"f:metadata":{"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{"f:selector":{},"f:template":{".":{},"f:metadata":{".":{},"f:creationTimestamp":{},"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{".":{},"f:containers":{".":{},"k:{\"name\":\"php-redis\"}":{".":{},"f:env":{".":{},"k:{\"name\":\"GET_HOSTS_FROM\"}":{".":{},"f:name":{},"f:value":{}}},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:ports":{".":{},"k:{\"containerPort\":80,\"protocol\":\"TCP\"}":{".":{},"f:containerPort":{},"f:protocol":{}}},"f:resources":{".":{},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}}}} }]},Spec:ReplicaSetSpec{Replicas:*2,Selector:&v1.LabelSelector{MatchLabels:map[string]string{app: guestbook,tier: frontend,},MatchExpressions:[]LabelSelectorRequirement{},},Template:{{ 0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] [] []} {[] [] [{php-redis gcr.io/google_samples/gb-frontend:v4 [] [] [{ 0 80 TCP }] [] [{GET_HOSTS_FROM dns nil}] {map[] map[cpu:{{100 -3} {<nil>} 100m DecimalSI} memory:{{104857600 0} {<nil>} 100Mi BinarySI}] []} [] [] [] nil nil nil nil /dev/termination-log File IfNotPresent nil false false false}] [] Always 0xc002ada308 <nil> ClusterFirst map[] <nil> false false false <nil> PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,} [] nil default-scheduler [] [] <nil> nil [] <nil> <nil> <nil> map[] [] <nil> nil <nil> [] []}},MinReadySeconds:0,},Status:ReplicaSetStatus{Replicas:3,FullyLabeledReplicas:3,ObservedGeneration:1,ReadyReplicas:0,AvailableReplicas:0,Conditions:[]ReplicaSetCondition{},},}
I0317 21:12:03.806196   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: frontend-5wffn"
core.sh:1244: Successful get rc frontend {{.spec.replicas}}: 2
core.sh:1248: Successful get rc frontend {{.spec.replicas}}: 2
error: Expected replicas to be 3, was 2
core.sh:1252: Successful get rc frontend {{.spec.replicas}}: 2
core.sh:1256: Successful get rc frontend {{.spec.replicas}}: 2
replicationcontroller/frontend scaled
I0317 21:12:04.277775   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/frontend" fieldPath="" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-7tfnq"
core.sh:1260: Successful get rc frontend {{.spec.replicas}}: 3
core.sh:1264: Successful get rc frontend {{.spec.replicas}}: 3
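Note: the lone "error: Expected replicas to be 3, was 2" above is kubectl scale's size precondition failing by design: the rc had already been scaled from 3 down to 2, so a scale guarded with --current-replicas=3 is refused, and the surrounding assertions confirm the count stays at 2 until an unguarded scale brings it back to 3. A minimal sketch of the sequence, using the names from the log:

  kubectl scale rc frontend --replicas=2                        # succeeds; controller deletes one pod
  kubectl scale rc frontend --current-replicas=3 --replicas=3   # fails: Expected replicas to be 3, was 2
  kubectl scale rc frontend --replicas=3                        # succeeds without the precondition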
... skipping 70 lines ...
I0317 21:12:07.550545   20027 alloc.go:330] "allocated clusterIPs" service="namespace-1679087521-20113/expose-test-deployment" clusterIPs=map[IPv4:10.0.0.210]
Successful
message:service/expose-test-deployment exposed
has:service/expose-test-deployment exposed
service "expose-test-deployment" deleted
Successful
message:error: couldn't retrieve selectors via --selector flag or introspection: invalid deployment: no selectors, therefore cannot be exposed
has:invalid deployment: no selectors
deployment.apps/nginx-deployment created
I0317 21:12:07.971986   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-7df65dc9f4 to 3"
I0317 21:12:07.995786   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-7df65dc9f4" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-7df65dc9f4-wxclz"
I0317 21:12:08.040239   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-7df65dc9f4" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-7df65dc9f4-tt6sz"
I0317 21:12:08.040272   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-7df65dc9f4" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-7df65dc9f4-fgjwz"
... skipping 24 lines ...
pod "valid-pod" deleted
service "frontend" deleted
service "frontend-2" deleted
service "frontend-3" deleted
service "frontend-4" deleted
Successful
message:error: cannot expose a Node
has:cannot expose
Successful
message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
has:metadata.name: Invalid value
I0317 21:12:10.414543   20027 alloc.go:330] "allocated clusterIPs" service="namespace-1679087521-20113/kubernetes-serve-hostname-testing-sixty-three-characters-in-len" clusterIPs=map[IPv4:10.0.0.79]
Successful
... skipping 32 lines ...
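Note: this block is the kubectl expose matrix: a deployment with selectors exposes cleanly, a deployment without selectors is rejected, a kind that cannot back a service (Node) is refused, and a service name longer than 63 characters fails API validation. A minimal sketch of one happy path and one failure (deployment name from the log; the port numbers and node name are illustrative):

  kubectl expose deployment nginx-deployment --port=80 --target-port=8000
  kubectl expose node 127.0.0.1 --port=80   # error: cannot expose a Node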
horizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1436: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{(index .spec.metrics 0).resource.target.averageUtilization}}: 1 2 70
horizontalpodautoscaler.autoscaling "frontend" deleted
horizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1440: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{(index .spec.metrics 0).resource.target.averageUtilization}}: 2 3 80
horizontalpodautoscaler.autoscaling "frontend" deleted
error: required flag(s) "max" not set
replicationcontroller "frontend" deleted
core.sh:1449: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}:
W0317 21:12:13.372498   23124 reflector.go:533] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0317 21:12:13.372541   23124 reflector.go:148] vendor/k8s.io/client-go/metadata/metadatainformer/informer.go:106: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
apiVersion: apps/v1
kind: Deployment
metadata:
  creationTimestamp: null
  labels:
    name: nginx-deployment-resources
... skipping 22 lines ...
          limits:
            cpu: 300m
          requests:
            cpu: 300m
      terminationGracePeriodSeconds: 0
status: {}
Error from server (NotFound): deployments.apps "nginx-deployment-resources" not found
deployment.apps/nginx-deployment-resources created
I0317 21:12:13.696406   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-5f79767bf9 to 3"
I0317 21:12:13.725343   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-5f79767bf9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-5f79767bf9-75dx6"
I0317 21:12:13.790637   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-5f79767bf9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-5f79767bf9-5jpkw"
I0317 21:12:13.790673   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-5f79767bf9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-5f79767bf9-ncnss"
core.sh:1455: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
core.sh:1456: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: registry.k8s.io/nginx:test-cmd:
core.sh:1457: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: registry.k8s.io/perl:
deployment.apps/nginx-deployment-resources resource requirements updated
I0317 21:12:14.056557   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-77d775b4f9 to 1"
I0317 21:12:14.079448   23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-77d775b4f9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-77d775b4f9-5984h"
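Note: the two autoscaled/deleted pairs above create HPAs against rc/frontend with different bounds (min 1 / max 2 at 70% CPU, then min 2 / max 3 at 80%), and the bare required-flag error shows kubectl autoscale refusing to run when --max is omitted. A minimal sketch, with the names and numbers from the log:

  kubectl autoscale rc frontend --min=1 --max=2 --cpu-percent=70
  kubectl autoscale rc frontend --min=2 --max=3 --cpu-percent=80
  kubectl autoscale rc frontend --min=2 --cpu-percent=80   # error: required flag(s) "max" not set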
object="namespace-1679087521-20113/nginx-deployment-resources-77d775b4f9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-77d775b4f9-5984h" [32mcore.sh:1460: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 100m: (B[m[32mcore.sh:1461: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 100m: (B[merror: unable to find container named redis deployment.apps/nginx-deployment-resources resource requirements updated I0317 21:12:14.436348 23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-resources-5f79767bf9 to 2 from 3" [32mcore.sh:1466: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m: (B[mI0317 21:12:14.496216 23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources" fieldPath="" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-688f8b78b5 to 1 from 0" I0317 21:12:14.505225 23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-5f79767bf9" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-deployment-resources-5f79767bf9-5jpkw" I0317 21:12:14.516415 23124 event.go:307] "Event occurred" object="namespace-1679087521-20113/nginx-deployment-resources-688f8b78b5" fieldPath="" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-688f8b78b5-5tgkt" ... skipping 155 lines ... status: "True" type: Progressing observedGeneration: 4 replicas: 4 unavailableReplicas: 4 updatedReplicas: 1 error: you must specify resources by --filename when --local is set. Example resource specifications include: '-f rsrc.yaml' '--filename=rsrc.json' [32mcore.sh:1477: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m: (B[m[32mcore.sh:1478: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m: (B[m[32mcore.sh:1479: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m: ... skipping 46 lines ... pod-template-hash=859689d794 Annotations: deployment.kubernetes.io/desired-replicas: 1 deployment.kubernetes.io/max-replicas: 2 deployment.kubernetes.io/revision: 1 Controlled By: Deployment/test-nginx-apps Replicas: 1 current / 1 desired Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed Pod Template: Labels: app=test-nginx-apps pod-template-hash=859689d794 Containers: nginx: Image: registry.k8s.io/nginx:test-cmd ... skipping 88 lines ...