diff --git a/apis/telemetry/v1alpha1/logparser_types.go b/apis/telemetry/v1alpha1/logparser_types.go index 6e9128d42..288465087 100644 --- a/apis/telemetry/v1alpha1/logparser_types.go +++ b/apis/telemetry/v1alpha1/logparser_types.go @@ -20,13 +20,17 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) -// LogParserSpec defines the desired state of LogParser. -type LogParserSpec struct { - // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster - // Important: Run "make" to regenerate code after modifying this file +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&LogParser{}, &LogParserList{}) +} - // [Fluent Bit Parsers](https://docs.fluentbit.io/manual/pipeline/parsers). The parser specified here has no effect until it is referenced by a [Pod annotation](https://docs.fluentbit.io/manual/pipeline/filters/kubernetes#kubernetes-annotations) on your workload or by a [Parser Filter](https://docs.fluentbit.io/manual/pipeline/filters/parser) defined in a pipeline's filters section. - Parser string `json:"parser,omitempty"` +// +kubebuilder:object:root=true +// LogParserList contains a list of LogParser. +type LogParserList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []LogParser `json:"items"` } // +kubebuilder:object:root=true @@ -35,7 +39,6 @@ type LogParserSpec struct { // +kubebuilder:printcolumn:name="Agent Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="AgentHealthy")].status` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` // +kubebuilder:deprecatedversion:warning="The LogParser API is deprecated. Instead, log in JSON format and use the JSON parsing feature of the LogPipeline" - // LogParser is the Schema for the logparsers API. type LogParser struct { metav1.TypeMeta `json:",inline"` @@ -46,13 +49,13 @@ type LogParser struct { Status LogParserStatus `json:"status,omitempty"` } -// +kubebuilder:object:root=true +// LogParserSpec defines the desired state of LogParser. +type LogParserSpec struct { + // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster + // Important: Run "make" to regenerate code after modifying this file -// LogParserList contains a list of LogParser. -type LogParserList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []LogParser `json:"items"` + // [Fluent Bit Parsers](https://docs.fluentbit.io/manual/pipeline/parsers). The parser specified here has no effect until it is referenced by a [Pod annotation](https://docs.fluentbit.io/manual/pipeline/filters/kubernetes#kubernetes-annotations) on your workload or by a [Parser Filter](https://docs.fluentbit.io/manual/pipeline/filters/parser) defined in a pipeline's filters section. + Parser string `json:"parser,omitempty"` } // LogParserStatus shows the observed state of the LogParser. @@ -60,8 +63,3 @@ type LogParserStatus struct { // An array of conditions describing the status of the parser. Conditions []metav1.Condition `json:"conditions,omitempty"` } - -//nolint:gochecknoinits // SchemeBuilder's registration is required. 
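For context, the LogParser API touched above only wraps a raw Fluent Bit parser definition, and the CRD is marked as deprecated in favor of JSON parsing in LogPipeline. A minimal sketch of constructing such an object in Go; the import path is assumed from the repository layout, and the regex parser body is purely illustrative:

```go
package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

func main() {
	// A LogParser only carries the raw parser definition; it has no effect
	// until a Pod annotation or a pipeline parser filter references it.
	parser := telemetryv1alpha1.LogParser{
		ObjectMeta: metav1.ObjectMeta{Name: "my-regex-parser"},
		Spec: telemetryv1alpha1.LogParserSpec{
			Parser: `Format regex
Regex  ^(?<time>[^ ]+) (?<stream>stdout|stderr) (?<log>.*)$`,
		},
	}
	fmt.Println(parser.Name)
}
```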
-func init() { - SchemeBuilder.Register(&LogParser{}, &LogParserList{}) -} diff --git a/apis/telemetry/v1alpha1/logpipeline_conversion.go b/apis/telemetry/v1alpha1/logpipeline_conversion.go index 54e9c3039..8237286e8 100644 --- a/apis/telemetry/v1alpha1/logpipeline_conversion.go +++ b/apis/telemetry/v1alpha1/logpipeline_conversion.go @@ -43,7 +43,7 @@ func (lp *LogPipeline) ConvertTo(dstRaw conversion.Hub) error { Port: srcHTTPOutput.Port, Compress: srcHTTPOutput.Compress, Format: srcHTTPOutput.Format, - TLSConfig: v1Alpha1TLSToV1Beta1(srcHTTPOutput.TLSConfig), + TLSConfig: v1Alpha1TLSToV1Beta1(srcHTTPOutput.TLS), Dedot: srcHTTPOutput.Dedot, } } @@ -86,19 +86,19 @@ func v1Alpha1OTLPInputToV1Beta1(otlp *OTLPInput) *telemetryv1beta1.OTLPInput { return input } -func v1Alpha1ApplicationToV1Beta1(application *ApplicationInput) *telemetryv1beta1.LogPipelineRuntimeInput { +func v1Alpha1ApplicationToV1Beta1(application *LogPipelineApplicationInput) *telemetryv1beta1.LogPipelineRuntimeInput { if application == nil { return nil } runtime := &telemetryv1beta1.LogPipelineRuntimeInput{ Enabled: application.Enabled, - Namespaces: telemetryv1beta1.LogPipelineInputNamespaces{ + Namespaces: telemetryv1beta1.LogPipelineNamespaceSelector{ Include: application.Namespaces.Include, Exclude: application.Namespaces.Exclude, System: application.Namespaces.System, }, - Containers: telemetryv1beta1.LogPipelineInputContainers{ + Containers: telemetryv1beta1.LogPipelineContainerSelector{ Include: application.Containers.Include, Exclude: application.Containers.Exclude, }, @@ -190,7 +190,7 @@ func v1Alpha1ValueTypeToV1Beta1(src ValueType) telemetryv1beta1.ValueType { } } -func v1Alpha1TLSToV1Beta1(src TLSConfig) telemetryv1beta1.OutputTLS { +func v1Alpha1TLSToV1Beta1(src LogPipelineOutputTLS) telemetryv1beta1.OutputTLS { var dst telemetryv1beta1.OutputTLS if src.CA != nil { @@ -229,24 +229,24 @@ func (lp *LogPipeline) ConvertFrom(srcRaw conversion.Hub) error { dst.Spec.Input.OTLP = v1Beta1OTLPInputToV1Alpha1(src.Spec.Input.OTLP) for _, f := range src.Spec.Files { - dst.Spec.Files = append(dst.Spec.Files, FileMount(f)) + dst.Spec.Files = append(dst.Spec.Files, LogPipelineFileMount(f)) } for _, f := range src.Spec.Filters { - dst.Spec.Filters = append(dst.Spec.Filters, Filter(f)) + dst.Spec.Filters = append(dst.Spec.Filters, LogPipelineFilter(f)) } if srcHTTPOutput := src.Spec.Output.HTTP; srcHTTPOutput != nil { - dst.Spec.Output.HTTP = &HTTPOutput{ - Host: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.Host), - User: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.User), - Password: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.Password), - URI: srcHTTPOutput.URI, - Port: srcHTTPOutput.Port, - Compress: srcHTTPOutput.Compress, - Format: srcHTTPOutput.Format, - TLSConfig: v1Beta1TLSToV1Alpha1(srcHTTPOutput.TLSConfig), - Dedot: srcHTTPOutput.Dedot, + dst.Spec.Output.HTTP = &LogPipelineHTTPOutput{ + Host: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.Host), + User: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.User), + Password: v1Beta1ValueTypeToV1Alpha1(srcHTTPOutput.Password), + URI: srcHTTPOutput.URI, + Port: srcHTTPOutput.Port, + Compress: srcHTTPOutput.Compress, + Format: srcHTTPOutput.Format, + TLS: v1Beta1TLSToV1Alpha1(srcHTTPOutput.TLSConfig), + Dedot: srcHTTPOutput.Dedot, } } @@ -270,19 +270,19 @@ func (lp *LogPipeline) ConvertFrom(srcRaw conversion.Hub) error { return nil } -func v1Beta1RuntimeToV1Alpha1(runtime *telemetryv1beta1.LogPipelineRuntimeInput) *ApplicationInput { +func v1Beta1RuntimeToV1Alpha1(runtime 
*telemetryv1beta1.LogPipelineRuntimeInput) *LogPipelineApplicationInput { if runtime == nil { return nil } - application := &ApplicationInput{ + application := &LogPipelineApplicationInput{ Enabled: runtime.Enabled, - Namespaces: InputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Include: runtime.Namespaces.Include, Exclude: runtime.Namespaces.Exclude, System: runtime.Namespaces.System, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Include: runtime.Containers.Include, Exclude: runtime.Containers.Exclude, }, @@ -378,8 +378,8 @@ func v1Beta1BasicAuthOptionsToV1Alpha1(basic *telemetryv1beta1.BasicAuthOptions) } } -func v1Beta1TLSToV1Alpha1(src telemetryv1beta1.OutputTLS) TLSConfig { - var dst TLSConfig +func v1Beta1TLSToV1Alpha1(src telemetryv1beta1.OutputTLS) LogPipelineOutputTLS { + var dst LogPipelineOutputTLS if src.CA != nil { ca := v1Beta1ValueTypeToV1Alpha1(*src.CA) diff --git a/apis/telemetry/v1alpha1/logpipeline_conversion_test.go b/apis/telemetry/v1alpha1/logpipeline_conversion_test.go index dae64580b..f436480b0 100644 --- a/apis/telemetry/v1alpha1/logpipeline_conversion_test.go +++ b/apis/telemetry/v1alpha1/logpipeline_conversion_test.go @@ -17,15 +17,15 @@ func TestConvertTo(t *testing.T) { Name: "log-pipeline-test", }, Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ Enabled: ptr.To(true), - Namespaces: InputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"default", "kube-system"}, Exclude: []string{"kube-public"}, System: true, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"nginx", "app"}, Exclude: []string{"sidecar"}, }, @@ -41,15 +41,15 @@ func TestConvertTo(t *testing.T) { }, }, }, - Files: []FileMount{ + Files: []LogPipelineFileMount{ {Name: "file1", Content: "file1-content"}, }, - Filters: []Filter{ + Filters: []LogPipelineFilter{ {Custom: "name stdout"}, }, - Output: Output{ + Output: LogPipelineOutput{ Custom: "custom-output", - HTTP: &HTTPOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ Value: "http://localhost", }, @@ -69,7 +69,7 @@ func TestConvertTo(t *testing.T) { Port: "8080", Compress: "on", Format: "json", - TLSConfig: TLSConfig{ + TLS: LogPipelineOutputTLS{ SkipCertificateValidation: true, CA: &ValueType{ Value: "ca", @@ -166,12 +166,12 @@ func TestConvertFrom(t *testing.T) { Input: telemetryv1beta1.LogPipelineInput{ Runtime: &telemetryv1beta1.LogPipelineRuntimeInput{ Enabled: ptr.To(true), - Namespaces: telemetryv1beta1.LogPipelineInputNamespaces{ + Namespaces: telemetryv1beta1.LogPipelineNamespaceSelector{ Include: []string{"default", "kube-system"}, Exclude: []string{"kube-public"}, System: true, }, - Containers: telemetryv1beta1.LogPipelineInputContainers{ + Containers: telemetryv1beta1.LogPipelineContainerSelector{ Include: []string{"nginx", "app"}, Exclude: []string{"sidecar"}, }, @@ -332,10 +332,10 @@ func requireLogPipelinesEquivalent(t *testing.T, x *LogPipeline, y *telemetryv1b require.Equal(t, xHTTP.Port, yHTTP.Port, "HTTP port mismatch") require.Equal(t, xHTTP.Compress, yHTTP.Compress, "HTTP compress mismatch") require.Equal(t, xHTTP.Format, yHTTP.Format, "HTTP format mismatch") - require.Equal(t, xHTTP.TLSConfig.SkipCertificateValidation, yHTTP.TLSConfig.SkipCertificateValidation, "HTTP TLS skip certificate validation mismatch") - require.Equal(t, xHTTP.TLSConfig.CA.Value, yHTTP.TLSConfig.CA.Value, "HTTP TLS CA mismatch") - 
require.Equal(t, xHTTP.TLSConfig.Cert.Value, yHTTP.TLSConfig.Cert.Value, "HTTP TLS cert mismatch") - require.Equal(t, xHTTP.TLSConfig.Key.Value, yHTTP.TLSConfig.Key.Value, "HTTP TLS key mismatch") + require.Equal(t, xHTTP.TLS.SkipCertificateValidation, yHTTP.TLSConfig.SkipCertificateValidation, "HTTP TLS skip certificate validation mismatch") + require.Equal(t, xHTTP.TLS.CA.Value, yHTTP.TLSConfig.CA.Value, "HTTP TLS CA mismatch") + require.Equal(t, xHTTP.TLS.Cert.Value, yHTTP.TLSConfig.Cert.Value, "HTTP TLS cert mismatch") + require.Equal(t, xHTTP.TLS.Key.Value, yHTTP.TLSConfig.Key.Value, "HTTP TLS key mismatch") xOTLP := x.Spec.Output.OTLP yOTLP := y.Spec.Output.OTLP diff --git a/apis/telemetry/v1alpha1/logpipeline_types.go b/apis/telemetry/v1alpha1/logpipeline_types.go index f9fec4fe5..fc3f30d31 100644 --- a/apis/telemetry/v1alpha1/logpipeline_types.go +++ b/apis/telemetry/v1alpha1/logpipeline_types.go @@ -29,41 +29,69 @@ const ( FluentBit ) +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&LogPipeline{}, &LogPipelineList{}) +} + +// +kubebuilder:object:root=true +// LogPipelineList contains a list of LogPipeline +type LogPipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []LogPipeline `json:"items"` +} + +// +kubebuilder:object:root=true +// +kubebuilder:resource:scope=Cluster,categories={kyma-telemetry,kyma-telemetry-pipelines} +// +kubebuilder:subresource:status +// +kubebuilder:printcolumn:name="Configuration Generated",type=string,JSONPath=`.status.conditions[?(@.type=="ConfigurationGenerated")].status` +// +kubebuilder:printcolumn:name="Agent Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="AgentHealthy")].status` +// +kubebuilder:printcolumn:name="Flow Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="TelemetryFlowHealthy")].status` +// +kubebuilder:printcolumn:name="Unsupported Mode",type=boolean,JSONPath=`.status.unsupportedMode` +// +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` +// LogPipeline is the Schema for the logpipelines API +type LogPipeline struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + // Defines the desired state of LogPipeline + Spec LogPipelineSpec `json:"spec,omitempty"` + // Shows the observed state of the LogPipeline + Status LogPipelineStatus `json:"status,omitempty"` +} + // LogPipelineSpec defines the desired state of LogPipeline // +kubebuilder:validation:XValidation:rule="!((has(self.output.http) || has(self.output.custom)) && has(self.input.otlp))", message="otlp input is only supported with otlp output" type LogPipelineSpec struct { - // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster - // Important: Run "make" to regenerate code after modifying this file - // Defines where to collect logs, including selector mechanisms. - Input Input `json:"input,omitempty"` - Filters []Filter `json:"filters,omitempty"` + Input LogPipelineInput `json:"input,omitempty"` + Filters []LogPipelineFilter `json:"filters,omitempty"` // [Fluent Bit output](https://docs.fluentbit.io/manual/pipeline/outputs) where you want to push the logs. Only one output can be specified. - Output Output `json:"output,omitempty"` - Files []FileMount `json:"files,omitempty"` + Output LogPipelineOutput `json:"output,omitempty"` + Files []LogPipelineFileMount `json:"files,omitempty"` // A list of mappings from Kubernetes Secret keys to environment variables. 
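The conversion changes above keep controller-runtime's hub-and-spoke pattern: v1alpha1 remains a spoke that converts to and from the v1beta1 hub, with the renamed `TLS` field mapping to the hub's `TLSConfig`. A round-trip sketch under the assumption that the import paths follow the repository layout and that `telemetryv1beta1.LogPipeline` implements `conversion.Hub`, as the ConvertTo/ConvertFrom signatures imply:

```go
package main

import (
	"fmt"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
	telemetryv1beta1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1beta1"
)

func main() {
	src := &telemetryv1alpha1.LogPipeline{
		Spec: telemetryv1alpha1.LogPipelineSpec{
			Output: telemetryv1alpha1.LogPipelineOutput{
				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
					Host: telemetryv1alpha1.ValueType{Value: "localhost"},
					TLS: telemetryv1alpha1.LogPipelineOutputTLS{
						SkipCertificateValidation: true,
					},
				},
			},
		},
	}

	// Spoke -> hub -> spoke: the v1alpha1 `tls` block travels through the
	// v1beta1 `tlsConfig` field and back.
	var hub telemetryv1beta1.LogPipeline
	if err := src.ConvertTo(&hub); err != nil {
		panic(err)
	}

	var roundTripped telemetryv1alpha1.LogPipeline
	if err := roundTripped.ConvertFrom(&hub); err != nil {
		panic(err)
	}

	fmt.Println(roundTripped.Spec.Output.HTTP.TLS.SkipCertificateValidation) // true
}
```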
Mapped keys are mounted as environment variables, so that they are available as [Variables](https://docs.fluentbit.io/manual/administration/configuring-fluent-bit/classic-mode/variables) in the sections. - Variables []VariableRef `json:"variables,omitempty"` + Variables []LogPipelineVariableRef `json:"variables,omitempty"` } -// Input describes a log input for a LogPipeline. -type Input struct { +// LogPipelineInput describes a log input for a LogPipeline. +type LogPipelineInput struct { // Configures in more detail from which containers application logs are enabled as input. - Application *ApplicationInput `json:"application,omitempty"` - + Application *LogPipelineApplicationInput `json:"application,omitempty"` // Configures an endpoint to receive logs from a OTLP source. OTLP *OTLPInput `json:"otlp,omitempty"` } -// ApplicationInput specifies the default type of Input that handles application logs from runtime containers. It configures in more detail from which containers logs are selected as input. -type ApplicationInput struct { +// LogPipelineApplicationInput specifies the default type of Input that handles application logs from runtime containers. It configures in more detail from which containers logs are selected as input. +type LogPipelineApplicationInput struct { // If enabled, application logs are collected. The default is `true`. // +optional // +kubebuilder:default=true Enabled *bool `json:"enabled,omitempty"` // Describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. System Namespaces are excluded by default from the collection. - Namespaces InputNamespaces `json:"namespaces,omitempty"` + Namespaces LogPipelineNamespaceSelector `json:"namespaces,omitempty"` // Describes whether application logs from specific containers are selected. The options are mutually exclusive. - Containers InputContainers `json:"containers,omitempty"` + Containers LogPipelineContainerSelector `json:"containers,omitempty"` // Defines whether to keep all Kubernetes annotations. The default is `false`. KeepAnnotations bool `json:"keepAnnotations,omitempty"` // Defines whether to drop all Kubernetes labels. The default is `false`. @@ -74,8 +102,8 @@ type ApplicationInput struct { KeepOriginalBody *bool `json:"keepOriginalBody,omitempty"` } -// InputNamespaces describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. System Namespaces are excluded by default from the collection. -type InputNamespaces struct { +// LogPipelineNamespaceSelector describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. System Namespaces are excluded by default from the collection. +type LogPipelineNamespaceSelector struct { // Include only the container logs of the specified Namespace names. Include []string `json:"include,omitempty"` // Exclude the container logs of the specified Namespace names. @@ -84,8 +112,8 @@ type InputNamespaces struct { System bool `json:"system,omitempty"` } -// InputContainers describes whether application logs from specific containers are selected. The options are mutually exclusive. -type InputContainers struct { +// LogPipelineContainerSelector describes whether application logs from specific containers are selected. The options are mutually exclusive. +type LogPipelineContainerSelector struct { // Specifies to include only the container logs with the specified container names. 
Include []string `json:"include,omitempty"` // Specifies to exclude only the container logs with the specified container names. @@ -93,13 +121,27 @@ type InputContainers struct { } // Describes a filtering option on the logs of the pipeline. -type Filter struct { +type LogPipelineFilter struct { // Custom filter definition in the Fluent Bit syntax. Note: If you use a `custom` filter, you put the LogPipeline in unsupported mode. Custom string `json:"custom,omitempty"` } -// HTTPOutput configures an HTTP-based output compatible with the Fluent Bit HTTP output plugin. -type HTTPOutput struct { +// LogPipelineOutput describes a Fluent Bit output configuration section. +// +kubebuilder:validation:XValidation:rule="has(self.otlp) == has(oldSelf.otlp)", message="Switching to or away from OTLP output is not supported" +// +kubebuilder:validation:XValidation:rule="(!has(self.custom) && !has(self.http)) || !(has(self.custom) && has(self.http))", message="Exactly one output must be defined" +// +kubebuilder:validation:XValidation:rule="(!has(self.custom) && !has(self.otlp)) || ! (has(self.custom) && has(self.otlp))", message="Exactly one output must be defined" +// +kubebuilder:validation:XValidation:rule="(!has(self.http) && !has(self.otlp)) || ! (has(self.http) && has(self.otlp))", message="Exactly one output must be defined" +type LogPipelineOutput struct { + // Defines a custom output in the Fluent Bit syntax. Note: If you use a `custom` output, you put the LogPipeline in unsupported mode. + Custom string `json:"custom,omitempty"` + // Configures an HTTP-based output compatible with the Fluent Bit HTTP output plugin. + HTTP *LogPipelineHTTPOutput `json:"http,omitempty"` + // Defines an output using the OpenTelemetry protocol. + OTLP *OTLPOutput `json:"otlp,omitempty"` +} + +// LogPipelineHTTPOutput configures an HTTP-based output compatible with the Fluent Bit HTTP output plugin. +type LogPipelineHTTPOutput struct { // Defines the host of the HTTP receiver. Host ValueType `json:"host,omitempty"` // Defines the basic auth user. @@ -115,13 +157,13 @@ type HTTPOutput struct { // Data format to be used in the HTTP request body. Default is `json`. Format string `json:"format,omitempty"` // Configures TLS for the HTTP target server. - TLSConfig TLSConfig `json:"tls,omitempty"` + TLS LogPipelineOutputTLS `json:"tls,omitempty"` // Enables de-dotting of Kubernetes labels and annotations for compatibility with ElasticSearch based backends. Dots (.) will be replaced by underscores (_). Default is `false`. Dedot bool `json:"dedot,omitempty"` } // +kubebuilder:validation:XValidation:rule="has(self.cert) == has(self.key)", message="Can define either both 'cert' and 'key', or neither" -type TLSConfig struct { +type LogPipelineOutputTLS struct { // Indicates if TLS is disabled or enabled. Default is `false`. Disabled bool `json:"disabled,omitempty"` // If `true`, the validation of certificates is skipped. Default is `false`. @@ -134,45 +176,52 @@ type TLSConfig struct { Key *ValueType `json:"key,omitempty"` } -// Output describes a Fluent Bit output configuration section. -// +kubebuilder:validation:XValidation:rule="has(self.otlp) == has(oldSelf.otlp)", message="Switching to or away from OTLP output is not supported" -// +kubebuilder:validation:XValidation:rule="(!has(self.custom) && !has(self.http)) || !(has(self.custom) && has(self.http))", message="Exactly one output must be defined" -// +kubebuilder:validation:XValidation:rule="(!has(self.custom) && !has(self.otlp)) || ! 
(has(self.custom) && has(self.otlp))", message="Exactly one output must be defined" -// +kubebuilder:validation:XValidation:rule="(!has(self.http) && !has(self.otlp)) || ! (has(self.http) && has(self.otlp))", message="Exactly one output must be defined" -type Output struct { - // Defines a custom output in the Fluent Bit syntax. Note: If you use a `custom` output, you put the LogPipeline in unsupported mode. - Custom string `json:"custom,omitempty"` - // Configures an HTTP-based output compatible with the Fluent Bit HTTP output plugin. - HTTP *HTTPOutput `json:"http,omitempty"` - // Defines an output using the OpenTelemetry protocol. - OTLP *OTLPOutput `json:"otlp,omitempty"` +// Provides file content to be consumed by a LogPipeline configuration +type LogPipelineFileMount struct { + Name string `json:"name,omitempty"` + Content string `json:"content,omitempty"` } -func (i *Input) IsValid() bool { +// References a Kubernetes secret that should be provided as environment variable to Fluent Bit +type LogPipelineVariableRef struct { + // Name of the variable to map. + Name string `json:"name,omitempty"` + ValueFrom ValueFromSource `json:"valueFrom,omitempty"` +} + +// LogPipelineStatus shows the observed state of the LogPipeline +type LogPipelineStatus struct { + // An array of conditions describing the status of the pipeline. + Conditions []metav1.Condition `json:"conditions,omitempty"` + // Is active when the LogPipeline uses a `custom` output or filter; see [unsupported mode](https://github.com/kyma-project/telemetry-manager/blob/main/docs/user/02-logs.md#unsupported-mode). + UnsupportedMode *bool `json:"unsupportedMode,omitempty"` +} + +func (i *LogPipelineInput) IsValid() bool { return i != nil } -func (o *Output) IsCustomDefined() bool { +func (o *LogPipelineOutput) IsCustomDefined() bool { return o.Custom != "" } -func (o *Output) IsHTTPDefined() bool { +func (o *LogPipelineOutput) IsHTTPDefined() bool { return o.HTTP != nil && o.HTTP.Host.IsValid() } -func (o *Output) IsOTLPDefined() bool { +func (o *LogPipelineOutput) IsOTLPDefined() bool { return o.OTLP != nil } -func (o *Output) IsAnyDefined() bool { +func (o *LogPipelineOutput) IsAnyDefined() bool { return o.pluginCount() > 0 } -func (o *Output) IsSingleDefined() bool { +func (o *LogPipelineOutput) IsSingleDefined() bool { return o.pluginCount() == 1 } -func (o *Output) pluginCount() int { +func (o *LogPipelineOutput) pluginCount() int { plugins := 0 if o.IsCustomDefined() { plugins++ @@ -189,47 +238,6 @@ func (o *Output) pluginCount() int { return plugins } -// Provides file content to be consumed by a LogPipeline configuration -type FileMount struct { - Name string `json:"name,omitempty"` - Content string `json:"content,omitempty"` -} - -// References a Kubernetes secret that should be provided as environment variable to Fluent Bit -type VariableRef struct { - // Name of the variable to map. - Name string `json:"name,omitempty"` - ValueFrom ValueFromSource `json:"valueFrom,omitempty"` -} - -// LogPipelineStatus shows the observed state of the LogPipeline -type LogPipelineStatus struct { - // An array of conditions describing the status of the pipeline. - Conditions []metav1.Condition `json:"conditions,omitempty"` - // Is active when the LogPipeline uses a `custom` output or filter; see [unsupported mode](https://github.com/kyma-project/telemetry-manager/blob/main/docs/user/02-logs.md#unsupported-mode). 
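The CEL rules on LogPipelineOutput and the pluginCount-based helpers enforce the single-output invariant from two sides: the API server rejects manifests that combine two outputs, while the helpers let controller and webhook code check the same condition (including the "no output at all" case) programmatically. A small sketch, with the import path assumed from the repository layout and the host name as a placeholder:

```go
package main

import (
	"fmt"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

func main() {
	// A single HTTP output satisfies the "exactly one output" invariant.
	httpOnly := telemetryv1alpha1.LogPipelineOutput{
		HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
			Host: telemetryv1alpha1.ValueType{Value: "fluentd.example.com"},
		},
	}
	fmt.Println(httpOnly.IsAnyDefined(), httpOnly.IsSingleDefined()) // true true

	// Adding a custom output on top violates the invariant that the CEL rules
	// ("Exactly one output must be defined") reject at admission time.
	both := httpOnly
	both.Custom = "name stdout"
	fmt.Println(both.IsSingleDefined()) // false
}
```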
- UnsupportedMode *bool `json:"unsupportedMode,omitempty"` -} - -// +kubebuilder:object:root=true -// +kubebuilder:resource:scope=Cluster,categories={kyma-telemetry,kyma-telemetry-pipelines} -// +kubebuilder:subresource:status -// +kubebuilder:printcolumn:name="Configuration Generated",type=string,JSONPath=`.status.conditions[?(@.type=="ConfigurationGenerated")].status` -// +kubebuilder:printcolumn:name="Agent Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="AgentHealthy")].status` -// +kubebuilder:printcolumn:name="Flow Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="TelemetryFlowHealthy")].status` -// +kubebuilder:printcolumn:name="Unsupported Mode",type=boolean,JSONPath=`.status.unsupportedMode` -// +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` - -// LogPipeline is the Schema for the logpipelines API -type LogPipeline struct { - metav1.TypeMeta `json:",inline"` - metav1.ObjectMeta `json:"metadata,omitempty"` - - // Defines the desired state of LogPipeline - Spec LogPipelineSpec `json:"spec,omitempty"` - // Shows the observed state of the LogPipeline - Status LogPipelineStatus `json:"status,omitempty"` -} - // ContainsCustomPlugin returns true if the pipeline contains any custom filters or outputs func (lp *LogPipeline) ContainsCustomPlugin() bool { for _, filter := range lp.Spec.Filters { @@ -240,16 +248,3 @@ func (lp *LogPipeline) ContainsCustomPlugin() bool { return lp.Spec.Output.IsCustomDefined() } - -// +kubebuilder:object:root=true -// LogPipelineList contains a list of LogPipeline -type LogPipelineList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []LogPipeline `json:"items"` -} - -//nolint:gochecknoinits // SchemeBuilder's registration is required. 
-func init() { - SchemeBuilder.Register(&LogPipeline{}, &LogPipelineList{}) -} diff --git a/apis/telemetry/v1alpha1/logpipeline_types_test.go b/apis/telemetry/v1alpha1/logpipeline_types_test.go index 08b64520b..d1775107b 100644 --- a/apis/telemetry/v1alpha1/logpipeline_types_test.go +++ b/apis/telemetry/v1alpha1/logpipeline_types_test.go @@ -9,7 +9,7 @@ import ( func TestLogPipelineOutput(t *testing.T) { tests := []struct { name string - given Output + given LogPipelineOutput expectedCustom bool expectedHTTP bool expectedLoki bool @@ -18,21 +18,21 @@ func TestLogPipelineOutput(t *testing.T) { }{ { name: "custom", - given: Output{Custom: "name: null"}, + given: LogPipelineOutput{Custom: "name: null"}, expectedCustom: true, expectedAny: true, expectedSingle: true, }, { name: "http", - given: Output{HTTP: &HTTPOutput{Host: ValueType{Value: "localhost"}}}, + given: LogPipelineOutput{HTTP: &LogPipelineHTTPOutput{Host: ValueType{Value: "localhost"}}}, expectedHTTP: true, expectedAny: true, expectedSingle: true, }, { name: "invalid: none defined", - given: Output{}, + given: LogPipelineOutput{}, expectedAny: false, expectedSingle: false, }, @@ -50,7 +50,7 @@ func TestLogPipelineOutput(t *testing.T) { func TestLogPipelineContainsCustomPluginWithCustomFilter(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Filters: []Filter{ + Filters: []LogPipelineFilter{ {Custom: ` Name some-filter`, }, @@ -65,7 +65,7 @@ func TestLogPipelineContainsCustomPluginWithCustomFilter(t *testing.T) { func TestLogPipelineContainsCustomPluginWithCustomOutput(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: ` Name some-output`, }, diff --git a/apis/telemetry/v1alpha1/logpipeline_validation.go b/apis/telemetry/v1alpha1/logpipeline_validation.go index 5fa24cd03..2eabadeef 100644 --- a/apis/telemetry/v1alpha1/logpipeline_validation.go +++ b/apis/telemetry/v1alpha1/logpipeline_validation.go @@ -50,7 +50,7 @@ func (lp *LogPipeline) validateOutput() error { return validateCustomOutput(output.Custom) } -func checkSingleOutputPlugin(output Output) error { +func checkSingleOutputPlugin(output LogPipelineOutput) error { if !output.IsAnyDefined() { return fmt.Errorf("no output plugin is defined, you must define one output plugin") } @@ -62,7 +62,7 @@ func checkSingleOutputPlugin(output Output) error { return nil } -func validateHTTPOutput(httpOutput *HTTPOutput) error { +func validateHTTPOutput(httpOutput *LogPipelineHTTPOutput) error { isValidHostname := validHostname(httpOutput.Host.Value) if httpOutput.Host.Value != "" && !isValidHostname { diff --git a/apis/telemetry/v1alpha1/logpipeline_validation_test.go b/apis/telemetry/v1alpha1/logpipeline_validation_test.go index 2edfca28b..d0028377b 100644 --- a/apis/telemetry/v1alpha1/logpipeline_validation_test.go +++ b/apis/telemetry/v1alpha1/logpipeline_validation_test.go @@ -10,7 +10,7 @@ import ( func TestContainsNoOutputPlugins(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{}, + Output: LogPipelineOutput{}, }} result := logPipeline.validateOutput() @@ -22,9 +22,9 @@ func TestContainsNoOutputPlugins(t *testing.T) { func TestContainsMultipleOutputPlugins(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: `Name http`, - HTTP: &HTTPOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ Value: "localhost", }, @@ -40,7 +40,7 @@ func TestContainsMultipleOutputPlugins(t *testing.T) { func 
TestValidateCustomOutput(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: ` name http`, }, @@ -54,7 +54,7 @@ func TestValidateCustomOutput(t *testing.T) { func TestValidateCustomHasForbiddenParameter(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: ` name http storage.total_limit_size 10G`, @@ -69,7 +69,7 @@ func TestValidateCustomHasForbiddenParameter(t *testing.T) { func TestValidateCustomOutputsContainsNoName(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: ` Regex .*`, }, @@ -85,8 +85,8 @@ func TestValidateCustomOutputsContainsNoName(t *testing.T) { func TestBothValueAndValueFromPresent(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ - HTTP: &HTTPOutput{ + Output: LogPipelineOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ Value: "localhost", ValueFrom: &ValueFromSource{ @@ -108,8 +108,8 @@ func TestBothValueAndValueFromPresent(t *testing.T) { func TestValueFromSecretKeyRef(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Output: Output{ - HTTP: &HTTPOutput{ + Output: LogPipelineOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ ValueFrom: &ValueFromSource{ SecretKeyRef: &SecretKeyRef{ @@ -130,7 +130,7 @@ func TestValidateCustomFilter(t *testing.T) { logPipeline := &LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "foo"}, Spec: LogPipelineSpec{ - Output: Output{ + Output: LogPipelineOutput{ Custom: ` Name http`, }, @@ -144,7 +144,7 @@ func TestValidateCustomFilter(t *testing.T) { func TestValidateCustomFiltersContainsNoName(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Filters: []Filter{ + Filters: []LogPipelineFilter{ {Custom: ` Match *`, }, @@ -160,7 +160,7 @@ func TestValidateCustomFiltersContainsNoName(t *testing.T) { func TestValidateCustomFiltersContainsMatch(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Filters: []Filter{ + Filters: []LogPipelineFilter{ {Custom: ` Name grep Match *`, @@ -179,7 +179,7 @@ func TestDeniedFilterPlugins(t *testing.T) { logPipeline := &LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "foo"}, Spec: LogPipelineSpec{ - Filters: []Filter{ + Filters: []LogPipelineFilter{ {Custom: ` Name kubernetes`, }, @@ -196,12 +196,12 @@ func TestDeniedFilterPlugins(t *testing.T) { func TestValidateWithValidInputIncludes(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1"}, }, }, @@ -215,12 +215,12 @@ func TestValidateWithValidInputIncludes(t *testing.T) { func TestValidateWithValidInputExcludes(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ Exclude: []string{"namespace-1", "namespace-2"}, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Exclude: []string{"container-1"}, }, }, @@ -235,12 +235,12 @@ func 
TestValidateWithValidInputExcludes(t *testing.T) { func TestValidateWithValidInputIncludeContainersSystemFlag(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ System: true, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1"}, }, }, @@ -255,12 +255,12 @@ func TestValidateWithValidInputIncludeContainersSystemFlag(t *testing.T) { func TestValidateWithValidInputExcludeContainersSystemFlag(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ System: true, }, - Containers: InputContainers{ + Containers: LogPipelineContainerSelector{ Exclude: []string{"container-1"}, }, }, @@ -275,9 +275,9 @@ func TestValidateWithValidInputExcludeContainersSystemFlag(t *testing.T) { func TestValidateWithInvalidNamespaceSelectors(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, Exclude: []string{"namespace-3"}, }, @@ -293,9 +293,9 @@ func TestValidateWithInvalidNamespaceSelectors(t *testing.T) { func TestValidateWithInvalidIncludeSystemFlag(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, System: true, }, @@ -311,9 +311,9 @@ func TestValidateWithInvalidIncludeSystemFlag(t *testing.T) { func TestValidateWithInvalidExcludeSystemFlag(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Namespaces: InputNamespaces{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Namespaces: LogPipelineNamespaceSelector{ Exclude: []string{"namespace-3"}, System: true, }, @@ -329,9 +329,9 @@ func TestValidateWithInvalidExcludeSystemFlag(t *testing.T) { func TestValidateWithInvalidContainerSelectors(t *testing.T) { logPipeline := &LogPipeline{ Spec: LogPipelineSpec{ - Input: Input{ - Application: &ApplicationInput{ - Containers: InputContainers{ + Input: LogPipelineInput{ + Application: &LogPipelineApplicationInput{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1", "container-2"}, Exclude: []string{"container-3"}, }, diff --git a/apis/telemetry/v1alpha1/metricpipeline_types.go b/apis/telemetry/v1alpha1/metricpipeline_types.go index 476b984c1..d179f18c7 100644 --- a/apis/telemetry/v1alpha1/metricpipeline_types.go +++ b/apis/telemetry/v1alpha1/metricpipeline_types.go @@ -26,7 +26,6 @@ func init() { } // +kubebuilder:object:root=true - // MetricPipelineList contains a list of MetricPipeline. 
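The selector tests above encode the rule that, within the application input, Include, Exclude, and the System flag are mutually exclusive per selector; the "Invalid" test cases mix them and expect validation to fail. A sketch of the shape the tests treat as valid, with the import path assumed from the repository layout:

```go
package main

import (
	"fmt"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

func main() {
	// Include-only selectors are valid; combining Include with Exclude, or
	// Include with System: true on namespaces, is rejected by validation.
	input := telemetryv1alpha1.LogPipelineInput{
		Application: &telemetryv1alpha1.LogPipelineApplicationInput{
			Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{
				Include: []string{"namespace-1", "namespace-2"},
			},
			Containers: telemetryv1alpha1.LogPipelineContainerSelector{
				Include: []string{"container-1"},
			},
		},
	}
	fmt.Println(input.IsValid()) // true: the input section is present
}
```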
type MetricPipelineList struct { metav1.TypeMeta `json:",inline"` @@ -90,7 +89,7 @@ type MetricPipelinePrometheusInput struct { Namespaces *NamespaceSelector `json:"namespaces,omitempty"` // Configures diagnostic metrics scraping // +optional - DiagnosticMetrics *DiagnosticMetrics `json:"diagnosticMetrics,omitempty"` + DiagnosticMetrics *MetricPipelineIstioInputDiagnosticMetrics `json:"diagnosticMetrics,omitempty"` } // MetricPipelineRuntimeInput defines the runtime scraping section. @@ -152,7 +151,13 @@ type MetricPipelineIstioInput struct { Namespaces *NamespaceSelector `json:"namespaces,omitempty"` // Configures diagnostic metrics scraping // +optional - DiagnosticMetrics *DiagnosticMetrics `json:"diagnosticMetrics,omitempty"` + DiagnosticMetrics *MetricPipelineIstioInputDiagnosticMetrics `json:"diagnosticMetrics,omitempty"` +} + +// MetricPipelineIstioInputDiagnosticMetrics defines the diagnostic metrics configuration section +type MetricPipelineIstioInputDiagnosticMetrics struct { + // If enabled, diagnostic metrics are scraped. The default is `false`. + Enabled bool `json:"enabled,omitempty"` } // MetricPipelineOutput defines the output configuration section. @@ -161,12 +166,6 @@ type MetricPipelineOutput struct { OTLP *OTLPOutput `json:"otlp"` } -// DiagnosticMetrics defines the diagnostic metrics configuration section -type DiagnosticMetrics struct { - // If enabled, diagnostic metrics are scraped. The default is `false`. - Enabled bool `json:"enabled,omitempty"` -} - // MetricPipelineStatus defines the observed state of MetricPipeline. type MetricPipelineStatus struct { // An array of conditions describing the status of the pipeline. diff --git a/apis/telemetry/v1alpha1/secret_refs.go b/apis/telemetry/v1alpha1/secret_refs.go index 542b5ad9c..ae5a6175b 100644 --- a/apis/telemetry/v1alpha1/secret_refs.go +++ b/apis/telemetry/v1alpha1/secret_refs.go @@ -34,7 +34,7 @@ func (lp *LogPipeline) GetTLSSecretRefs() []SecretKeyRef { output := lp.Spec.Output if output.HTTP != nil { - tlsConfig := output.HTTP.TLSConfig + tlsConfig := output.HTTP.TLS if tlsConfig.CA != nil { refs = appendIfSecretRef(refs, *tlsConfig.CA) } diff --git a/apis/telemetry/v1alpha1/secret_refs_test.go b/apis/telemetry/v1alpha1/secret_refs_test.go index 1c0443c3e..0e72c48c8 100644 --- a/apis/telemetry/v1alpha1/secret_refs_test.go +++ b/apis/telemetry/v1alpha1/secret_refs_test.go @@ -17,7 +17,7 @@ func TestLogPipeline_GetSecretRefs(t *testing.T) { name: "only variables", given: LogPipeline{ Spec: LogPipelineSpec{ - Variables: []VariableRef{ + Variables: []LogPipelineVariableRef{ { Name: "password-1", ValueFrom: ValueFromSource{ @@ -46,8 +46,8 @@ func TestLogPipeline_GetSecretRefs(t *testing.T) { Name: "cls", }, Spec: LogPipelineSpec{ - Output: Output{ - HTTP: &HTTPOutput{ + Output: LogPipelineOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ ValueFrom: &ValueFromSource{ SecretKeyRef: &SecretKeyRef{ @@ -86,8 +86,8 @@ func TestLogPipeline_GetSecretRefs(t *testing.T) { Name: "cls", }, Spec: LogPipelineSpec{ - Output: Output{ - HTTP: &HTTPOutput{ + Output: LogPipelineOutput{ + HTTP: &LogPipelineHTTPOutput{ Host: ValueType{ ValueFrom: &ValueFromSource{ SecretKeyRef: &SecretKeyRef{ diff --git a/apis/telemetry/v1alpha1/shared_types.go b/apis/telemetry/v1alpha1/shared_types.go index 69c6be033..a2cfe665b 100644 --- a/apis/telemetry/v1alpha1/shared_types.go +++ b/apis/telemetry/v1alpha1/shared_types.go @@ -48,29 +48,6 @@ func (skr *SecretKeyRef) NamespacedName() types.NamespacedName { return types.NamespacedName{Name: 
skr.Name, Namespace: skr.Namespace} } -type Header struct { - // Defines the header name. - Name string `json:"name"` - // Defines the header value. - ValueType `json:",inline"` - // Defines an optional header value prefix. The prefix is separated from the value by a space character. - Prefix string `json:"prefix,omitempty"` -} - -// +kubebuilder:validation:XValidation:rule="has(self.cert) == has(self.key)", message="Can define either both 'cert' and 'key', or neither" -type OTLPTLS struct { - // Defines whether to send requests using plaintext instead of TLS. - Insecure bool `json:"insecure,omitempty"` - // Defines whether to skip server certificate verification when using TLS. - InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` - // Defines an optional CA certificate for server certificate verification when using TLS. The certificate must be provided in PEM format. - CA *ValueType `json:"ca,omitempty"` - // Defines a client certificate to use when using TLS. The certificate must be provided in PEM format. - Cert *ValueType `json:"cert,omitempty"` - // Defines the client key to use when using TLS. The key must be provided in PEM format. - Key *ValueType `json:"key,omitempty"` -} - const ( OTLPProtocolHTTP string = "http" OTLPProtocolGRPC string = "grpc" @@ -111,6 +88,29 @@ type BasicAuthOptions struct { Password ValueType `json:"password"` } +type Header struct { + // Defines the header name. + Name string `json:"name"` + // Defines the header value. + ValueType `json:",inline"` + // Defines an optional header value prefix. The prefix is separated from the value by a space character. + Prefix string `json:"prefix,omitempty"` +} + +// +kubebuilder:validation:XValidation:rule="has(self.cert) == has(self.key)", message="Can define either both 'cert' and 'key', or neither" +type OTLPTLS struct { + // Defines whether to send requests using plaintext instead of TLS. + Insecure bool `json:"insecure,omitempty"` + // Defines whether to skip server certificate verification when using TLS. + InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` + // Defines an optional CA certificate for server certificate verification when using TLS. The certificate must be provided in PEM format. + CA *ValueType `json:"ca,omitempty"` + // Defines a client certificate to use when using TLS. The certificate must be provided in PEM format. + Cert *ValueType `json:"cert,omitempty"` + // Defines the client key to use when using TLS. The key must be provided in PEM format. + Key *ValueType `json:"key,omitempty"` +} + // OTLPInput defines the collection of push-based metrics that use the OpenTelemetry protocol. type OTLPInput struct { // If disabled, push-based OTLP signals are not collected. The default is `false`. diff --git a/apis/telemetry/v1alpha1/tracepipeline_types.go b/apis/telemetry/v1alpha1/tracepipeline_types.go index 855b39d59..25e4107fe 100644 --- a/apis/telemetry/v1alpha1/tracepipeline_types.go +++ b/apis/telemetry/v1alpha1/tracepipeline_types.go @@ -20,22 +20,17 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) -// TracePipelineSpec defines the desired state of TracePipeline -type TracePipelineSpec struct { - // Defines a destination for shipping trace data. Only one can be defined per pipeline. - Output TracePipelineOutput `json:"output"` -} - -// TracePipelineOutput defines the output configuration section. 
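OTLPTLS is only moved within shared_types.go, and it carries the same pairing rule as the log output TLS block: the CEL validation allows `cert` and `key` only together, or neither. A sketch with placeholder PEM values and an assumed import path:

```go
package main

import (
	"fmt"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

func main() {
	// Cert and Key must be provided together; supplying only one is rejected
	// by the "Can define either both 'cert' and 'key', or neither" CEL rule.
	tls := telemetryv1alpha1.OTLPTLS{
		CA:   &telemetryv1alpha1.ValueType{Value: "-----BEGIN CERTIFICATE-----\n..."},
		Cert: &telemetryv1alpha1.ValueType{Value: "-----BEGIN CERTIFICATE-----\n..."},
		Key:  &telemetryv1alpha1.ValueType{Value: "-----BEGIN PRIVATE KEY-----\n..."},
	}
	fmt.Println(tls.Insecure, tls.InsecureSkipVerify) // false false by default
}
```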
-type TracePipelineOutput struct { - // Configures the underlying OTel Collector with an [OTLP exporter](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/otlpexporter/README.md). If you switch `protocol`to `http`, an [OTLP HTTP exporter](https://github.com/open-telemetry/opentelemetry-collector/tree/main/exporter/otlphttpexporter) is used. - OTLP *OTLPOutput `json:"otlp"` +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&TracePipeline{}, &TracePipelineList{}) } -// Defines the observed state of TracePipeline. -type TracePipelineStatus struct { - // An array of conditions describing the status of the pipeline. - Conditions []metav1.Condition `json:"conditions,omitempty"` +// +kubebuilder:object:root=true +// TracePipelineList contains a list of TracePipeline +type TracePipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []TracePipeline `json:"items"` } // +kubebuilder:object:root=true @@ -45,7 +40,6 @@ type TracePipelineStatus struct { // +kubebuilder:printcolumn:name="Gateway Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="GatewayHealthy")].status` // +kubebuilder:printcolumn:name="Flow Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="TelemetryFlowHealthy")].status` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` - // TracePipeline is the Schema for the tracepipelines API type TracePipeline struct { metav1.TypeMeta `json:",inline"` @@ -57,16 +51,20 @@ type TracePipeline struct { Status TracePipelineStatus `json:"status,omitempty"` } -// +kubebuilder:object:root=true +// TracePipelineSpec defines the desired state of TracePipeline +type TracePipelineSpec struct { + // Defines a destination for shipping trace data. Only one can be defined per pipeline. + Output TracePipelineOutput `json:"output"` +} -// TracePipelineList contains a list of TracePipeline -type TracePipelineList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []TracePipeline `json:"items"` +// TracePipelineOutput defines the output configuration section. +type TracePipelineOutput struct { + // Configures the underlying OTel Collector with an [OTLP exporter](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/otlpexporter/README.md). If you switch `protocol`to `http`, an [OTLP HTTP exporter](https://github.com/open-telemetry/opentelemetry-collector/tree/main/exporter/otlphttpexporter) is used. + OTLP *OTLPOutput `json:"otlp"` } -//nolint:gochecknoinits // SchemeBuilder's registration is required. -func init() { - SchemeBuilder.Register(&TracePipeline{}, &TracePipelineList{}) +// Defines the observed state of TracePipeline. +type TracePipelineStatus struct { + // An array of conditions describing the status of the pipeline. + Conditions []metav1.Condition `json:"conditions,omitempty"` } diff --git a/apis/telemetry/v1alpha1/zz_generated.deepcopy.go b/apis/telemetry/v1alpha1/zz_generated.deepcopy.go index 2f45a4e97..8d037cff3 100644 --- a/apis/telemetry/v1alpha1/zz_generated.deepcopy.go +++ b/apis/telemetry/v1alpha1/zz_generated.deepcopy.go @@ -25,33 +25,6 @@ import ( runtime "k8s.io/apimachinery/pkg/runtime" ) -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
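Each *_types.go file in this PR moves the init-based SchemeBuilder registration to the top of the file; the registration itself is unchanged. A sketch of how that registration is consumed, assuming the conventional kubebuilder-generated AddToScheme and GroupVersion symbols for this API group:

```go
package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/runtime"

	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

func main() {
	// AddToScheme replays what the package's init() functions registered with
	// SchemeBuilder (LogParser, LogPipeline, TracePipeline, and their lists).
	// AddToScheme and GroupVersion are assumed kubebuilder-generated symbols.
	scheme := runtime.NewScheme()
	if err := telemetryv1alpha1.AddToScheme(scheme); err != nil {
		panic(err)
	}
	fmt.Println(scheme.IsVersionRegistered(telemetryv1alpha1.GroupVersion))
}
```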
-func (in *ApplicationInput) DeepCopyInto(out *ApplicationInput) { - *out = *in - if in.Enabled != nil { - in, out := &in.Enabled, &out.Enabled - *out = new(bool) - **out = **in - } - in.Namespaces.DeepCopyInto(&out.Namespaces) - in.Containers.DeepCopyInto(&out.Containers) - if in.KeepOriginalBody != nil { - in, out := &in.KeepOriginalBody, &out.KeepOriginalBody - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ApplicationInput. -func (in *ApplicationInput) DeepCopy() *ApplicationInput { - if in == nil { - return nil - } - out := new(ApplicationInput) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *AuthenticationOptions) DeepCopyInto(out *AuthenticationOptions) { *out = *in @@ -90,137 +63,173 @@ func (in *BasicAuthOptions) DeepCopy() *BasicAuthOptions { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DiagnosticMetrics) DeepCopyInto(out *DiagnosticMetrics) { +func (in *Header) DeepCopyInto(out *Header) { *out = *in + in.ValueType.DeepCopyInto(&out.ValueType) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiagnosticMetrics. -func (in *DiagnosticMetrics) DeepCopy() *DiagnosticMetrics { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Header. +func (in *Header) DeepCopy() *Header { if in == nil { return nil } - out := new(DiagnosticMetrics) + out := new(Header) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *FileMount) DeepCopyInto(out *FileMount) { +func (in *LogParser) DeepCopyInto(out *LogParser) { *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + out.Spec = in.Spec + in.Status.DeepCopyInto(&out.Status) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileMount. -func (in *FileMount) DeepCopy() *FileMount { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParser. +func (in *LogParser) DeepCopy() *LogParser { if in == nil { return nil } - out := new(FileMount) + out := new(LogParser) in.DeepCopyInto(out) return out } +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *LogParser) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Filter) DeepCopyInto(out *Filter) { +func (in *LogParserList) DeepCopyInto(out *LogParserList) { *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]LogParser, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Filter. -func (in *Filter) DeepCopy() *Filter { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserList. 
+func (in *LogParserList) DeepCopy() *LogParserList { if in == nil { return nil } - out := new(Filter) + out := new(LogParserList) in.DeepCopyInto(out) return out } +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *LogParserList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *HTTPOutput) DeepCopyInto(out *HTTPOutput) { +func (in *LogParserSpec) DeepCopyInto(out *LogParserSpec) { *out = *in - in.Host.DeepCopyInto(&out.Host) - in.User.DeepCopyInto(&out.User) - in.Password.DeepCopyInto(&out.Password) - in.TLSConfig.DeepCopyInto(&out.TLSConfig) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HTTPOutput. -func (in *HTTPOutput) DeepCopy() *HTTPOutput { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserSpec. +func (in *LogParserSpec) DeepCopy() *LogParserSpec { if in == nil { return nil } - out := new(HTTPOutput) + out := new(LogParserSpec) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Header) DeepCopyInto(out *Header) { +func (in *LogParserStatus) DeepCopyInto(out *LogParserStatus) { *out = *in - in.ValueType.DeepCopyInto(&out.ValueType) + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]v1.Condition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Header. -func (in *Header) DeepCopy() *Header { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserStatus. +func (in *LogParserStatus) DeepCopy() *LogParserStatus { if in == nil { return nil } - out := new(Header) + out := new(LogParserStatus) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Input) DeepCopyInto(out *Input) { +func (in *LogPipeline) DeepCopyInto(out *LogPipeline) { *out = *in - if in.Application != nil { - in, out := &in.Application, &out.Application - *out = new(ApplicationInput) - (*in).DeepCopyInto(*out) - } - if in.OTLP != nil { - in, out := &in.OTLP, &out.OTLP - *out = new(OTLPInput) - (*in).DeepCopyInto(*out) - } + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Input. -func (in *Input) DeepCopy() *Input { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipeline. +func (in *LogPipeline) DeepCopy() *LogPipeline { if in == nil { return nil } - out := new(Input) + out := new(LogPipeline) in.DeepCopyInto(out) return out } +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *LogPipeline) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *InputContainers) DeepCopyInto(out *InputContainers) { +func (in *LogPipelineApplicationInput) DeepCopyInto(out *LogPipelineApplicationInput) { *out = *in - if in.Include != nil { - in, out := &in.Include, &out.Include - *out = make([]string, len(*in)) - copy(*out, *in) + if in.Enabled != nil { + in, out := &in.Enabled, &out.Enabled + *out = new(bool) + **out = **in } - if in.Exclude != nil { - in, out := &in.Exclude, &out.Exclude - *out = make([]string, len(*in)) - copy(*out, *in) + in.Namespaces.DeepCopyInto(&out.Namespaces) + in.Containers.DeepCopyInto(&out.Containers) + if in.KeepOriginalBody != nil { + in, out := &in.KeepOriginalBody, &out.KeepOriginalBody + *out = new(bool) + **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InputContainers. -func (in *InputContainers) DeepCopy() *InputContainers { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineApplicationInput. +func (in *LogPipelineApplicationInput) DeepCopy() *LogPipelineApplicationInput { if in == nil { return nil } - out := new(InputContainers) + out := new(LogPipelineApplicationInput) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InputNamespaces) DeepCopyInto(out *InputNamespaces) { +func (in *LogPipelineContainerSelector) DeepCopyInto(out *LogPipelineContainerSelector) { *out = *in if in.Include != nil { in, out := &in.Include, &out.Include @@ -234,133 +243,116 @@ func (in *InputNamespaces) DeepCopyInto(out *InputNamespaces) { } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InputNamespaces. -func (in *InputNamespaces) DeepCopy() *InputNamespaces { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineContainerSelector. +func (in *LogPipelineContainerSelector) DeepCopy() *LogPipelineContainerSelector { if in == nil { return nil } - out := new(InputNamespaces) + out := new(LogPipelineContainerSelector) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogParser) DeepCopyInto(out *LogParser) { +func (in *LogPipelineFileMount) DeepCopyInto(out *LogPipelineFileMount) { *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - out.Spec = in.Spec - in.Status.DeepCopyInto(&out.Status) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParser. -func (in *LogParser) DeepCopy() *LogParser { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineFileMount. +func (in *LogPipelineFileMount) DeepCopy() *LogPipelineFileMount { if in == nil { return nil } - out := new(LogParser) + out := new(LogPipelineFileMount) in.DeepCopyInto(out) return out } -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *LogParser) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *LogParserList) DeepCopyInto(out *LogParserList) { +func (in *LogPipelineFilter) DeepCopyInto(out *LogPipelineFilter) { *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]LogParser, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserList. -func (in *LogParserList) DeepCopy() *LogParserList { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineFilter. +func (in *LogPipelineFilter) DeepCopy() *LogPipelineFilter { if in == nil { return nil } - out := new(LogParserList) + out := new(LogPipelineFilter) in.DeepCopyInto(out) return out } -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *LogParserList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogParserSpec) DeepCopyInto(out *LogParserSpec) { +func (in *LogPipelineHTTPOutput) DeepCopyInto(out *LogPipelineHTTPOutput) { *out = *in + in.Host.DeepCopyInto(&out.Host) + in.User.DeepCopyInto(&out.User) + in.Password.DeepCopyInto(&out.Password) + in.TLS.DeepCopyInto(&out.TLS) } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserSpec. -func (in *LogParserSpec) DeepCopy() *LogParserSpec { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineHTTPOutput. +func (in *LogPipelineHTTPOutput) DeepCopy() *LogPipelineHTTPOutput { if in == nil { return nil } - out := new(LogParserSpec) + out := new(LogPipelineHTTPOutput) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogParserStatus) DeepCopyInto(out *LogParserStatus) { +func (in *LogPipelineInput) DeepCopyInto(out *LogPipelineInput) { *out = *in - if in.Conditions != nil { - in, out := &in.Conditions, &out.Conditions - *out = make([]v1.Condition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } + if in.Application != nil { + in, out := &in.Application, &out.Application + *out = new(LogPipelineApplicationInput) + (*in).DeepCopyInto(*out) + } + if in.OTLP != nil { + in, out := &in.OTLP, &out.OTLP + *out = new(OTLPInput) + (*in).DeepCopyInto(*out) } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogParserStatus. -func (in *LogParserStatus) DeepCopy() *LogParserStatus { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineInput. +func (in *LogPipelineInput) DeepCopy() *LogPipelineInput { if in == nil { return nil } - out := new(LogParserStatus) + out := new(LogPipelineInput) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
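The HTTP output rename also surfaces in call sites: the struct is now LogPipelineHTTPOutput and its TLS settings live in the TLS field (formerly TLSConfig). A hedged sketch, not part of the patch; host, port, URI, and the CA value are placeholders:

package example

import (
	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

// newHTTPOutput shows the renamed v1alpha1 output type; all values are placeholders.
func newHTTPOutput() *telemetryv1alpha1.LogPipelineHTTPOutput {
	return &telemetryv1alpha1.LogPipelineHTTPOutput{
		Host:  telemetryv1alpha1.ValueType{Value: "logs.example.com"},
		Port:  "443",
		URI:   "/ingest",
		Dedot: true,
		// TLS replaces the former TLSConfig field.
		TLS: telemetryv1alpha1.LogPipelineOutputTLS{
			SkipCertificateValidation: false,
			CA:                        &telemetryv1alpha1.ValueType{Value: "<PEM-encoded CA>"},
		},
	}
}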
-func (in *LogPipeline) DeepCopyInto(out *LogPipeline) { +func (in *LogPipelineList) DeepCopyInto(out *LogPipelineList) { *out = *in out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - in.Status.DeepCopyInto(&out.Status) + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]LogPipeline, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipeline. -func (in *LogPipeline) DeepCopy() *LogPipeline { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineList. +func (in *LogPipelineList) DeepCopy() *LogPipelineList { if in == nil { return nil } - out := new(LogPipeline) + out := new(LogPipelineList) in.DeepCopyInto(out) return out } // DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *LogPipeline) DeepCopyObject() runtime.Object { +func (in *LogPipelineList) DeepCopyObject() runtime.Object { if c := in.DeepCopy(); c != nil { return c } @@ -368,35 +360,83 @@ func (in *LogPipeline) DeepCopyObject() runtime.Object { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogPipelineList) DeepCopyInto(out *LogPipelineList) { +func (in *LogPipelineNamespaceSelector) DeepCopyInto(out *LogPipelineNamespaceSelector) { *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]LogPipeline, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } + if in.Include != nil { + in, out := &in.Include, &out.Include + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Exclude != nil { + in, out := &in.Exclude, &out.Exclude + *out = make([]string, len(*in)) + copy(*out, *in) } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineList. -func (in *LogPipelineList) DeepCopy() *LogPipelineList { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineNamespaceSelector. +func (in *LogPipelineNamespaceSelector) DeepCopy() *LogPipelineNamespaceSelector { if in == nil { return nil } - out := new(LogPipelineList) + out := new(LogPipelineNamespaceSelector) in.DeepCopyInto(out) return out } -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *LogPipelineList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LogPipelineOutput) DeepCopyInto(out *LogPipelineOutput) { + *out = *in + if in.HTTP != nil { + in, out := &in.HTTP, &out.HTTP + *out = new(LogPipelineHTTPOutput) + (*in).DeepCopyInto(*out) } - return nil + if in.OTLP != nil { + in, out := &in.OTLP, &out.OTLP + *out = new(OTLPOutput) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineOutput. 
+func (in *LogPipelineOutput) DeepCopy() *LogPipelineOutput { + if in == nil { + return nil + } + out := new(LogPipelineOutput) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LogPipelineOutputTLS) DeepCopyInto(out *LogPipelineOutputTLS) { + *out = *in + if in.CA != nil { + in, out := &in.CA, &out.CA + *out = new(ValueType) + (*in).DeepCopyInto(*out) + } + if in.Cert != nil { + in, out := &in.Cert, &out.Cert + *out = new(ValueType) + (*in).DeepCopyInto(*out) + } + if in.Key != nil { + in, out := &in.Key, &out.Key + *out = new(ValueType) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineOutputTLS. +func (in *LogPipelineOutputTLS) DeepCopy() *LogPipelineOutputTLS { + if in == nil { + return nil + } + out := new(LogPipelineOutputTLS) + in.DeepCopyInto(out) + return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. @@ -405,18 +445,18 @@ func (in *LogPipelineSpec) DeepCopyInto(out *LogPipelineSpec) { in.Input.DeepCopyInto(&out.Input) if in.Filters != nil { in, out := &in.Filters, &out.Filters - *out = make([]Filter, len(*in)) + *out = make([]LogPipelineFilter, len(*in)) copy(*out, *in) } in.Output.DeepCopyInto(&out.Output) if in.Files != nil { in, out := &in.Files, &out.Files - *out = make([]FileMount, len(*in)) + *out = make([]LogPipelineFileMount, len(*in)) copy(*out, *in) } if in.Variables != nil { in, out := &in.Variables, &out.Variables - *out = make([]VariableRef, len(*in)) + *out = make([]LogPipelineVariableRef, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -460,6 +500,22 @@ func (in *LogPipelineStatus) DeepCopy() *LogPipelineStatus { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LogPipelineVariableRef) DeepCopyInto(out *LogPipelineVariableRef) { + *out = *in + in.ValueFrom.DeepCopyInto(&out.ValueFrom) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineVariableRef. +func (in *LogPipelineVariableRef) DeepCopy() *LogPipelineVariableRef { + if in == nil { + return nil + } + out := new(LogPipelineVariableRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MetricPipeline) DeepCopyInto(out *MetricPipeline) { *out = *in @@ -532,7 +588,7 @@ func (in *MetricPipelineIstioInput) DeepCopyInto(out *MetricPipelineIstioInput) } if in.DiagnosticMetrics != nil { in, out := &in.DiagnosticMetrics, &out.DiagnosticMetrics - *out = new(DiagnosticMetrics) + *out = new(MetricPipelineIstioInputDiagnosticMetrics) **out = **in } } @@ -547,6 +603,21 @@ func (in *MetricPipelineIstioInput) DeepCopy() *MetricPipelineIstioInput { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MetricPipelineIstioInputDiagnosticMetrics) DeepCopyInto(out *MetricPipelineIstioInputDiagnosticMetrics) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricPipelineIstioInputDiagnosticMetrics. 
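As a reminder of why the regenerated helpers matter: DeepCopy re-allocates pointer fields such as HTTP and the TLS value types, so a copy can be mutated without touching the cached original. Illustrative sketch only, not part of the patch; the function name and host value are made up:

package example

import (
	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
)

// retargetCopy mutates a deep copy of the output; the original stays untouched
// because the generated DeepCopy re-allocates the HTTP pointer and nested values.
func retargetCopy(orig *telemetryv1alpha1.LogPipelineOutput) *telemetryv1alpha1.LogPipelineOutput {
	cp := orig.DeepCopy()
	if cp.HTTP != nil {
		cp.HTTP.Host.Value = "staging.example.com" // placeholder host
	}
	return cp
}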
+func (in *MetricPipelineIstioInputDiagnosticMetrics) DeepCopy() *MetricPipelineIstioInputDiagnosticMetrics { + if in == nil { + return nil + } + out := new(MetricPipelineIstioInputDiagnosticMetrics) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MetricPipelineList) DeepCopyInto(out *MetricPipelineList) { *out = *in @@ -609,7 +680,7 @@ func (in *MetricPipelinePrometheusInput) DeepCopyInto(out *MetricPipelinePrometh } if in.DiagnosticMetrics != nil { in, out := &in.DiagnosticMetrics, &out.DiagnosticMetrics - *out = new(DiagnosticMetrics) + *out = new(MetricPipelineIstioInputDiagnosticMetrics) **out = **in } } @@ -871,31 +942,6 @@ func (in *OTLPTLS) DeepCopy() *OTLPTLS { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Output) DeepCopyInto(out *Output) { - *out = *in - if in.HTTP != nil { - in, out := &in.HTTP, &out.HTTP - *out = new(HTTPOutput) - (*in).DeepCopyInto(*out) - } - if in.OTLP != nil { - in, out := &in.OTLP, &out.OTLP - *out = new(OTLPOutput) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Output. -func (in *Output) DeepCopy() *Output { - if in == nil { - return nil - } - out := new(Output) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *SecretKeyRef) DeepCopyInto(out *SecretKeyRef) { *out = *in @@ -911,36 +957,6 @@ func (in *SecretKeyRef) DeepCopy() *SecretKeyRef { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TLSConfig) DeepCopyInto(out *TLSConfig) { - *out = *in - if in.CA != nil { - in, out := &in.CA, &out.CA - *out = new(ValueType) - (*in).DeepCopyInto(*out) - } - if in.Cert != nil { - in, out := &in.Cert, &out.Cert - *out = new(ValueType) - (*in).DeepCopyInto(*out) - } - if in.Key != nil { - in, out := &in.Key, &out.Key - *out = new(ValueType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TLSConfig. -func (in *TLSConfig) DeepCopy() *TLSConfig { - if in == nil { - return nil - } - out := new(TLSConfig) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *TracePipeline) DeepCopyInto(out *TracePipeline) { *out = *in @@ -1097,19 +1113,3 @@ func (in *ValueType) DeepCopy() *ValueType { in.DeepCopyInto(out) return out } - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *VariableRef) DeepCopyInto(out *VariableRef) { - *out = *in - in.ValueFrom.DeepCopyInto(&out.ValueFrom) -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VariableRef. 
-func (in *VariableRef) DeepCopy() *VariableRef { - if in == nil { - return nil - } - out := new(VariableRef) - in.DeepCopyInto(out) - return out -} diff --git a/apis/telemetry/v1beta1/logpipeline_types.go b/apis/telemetry/v1beta1/logpipeline_types.go index 02e5ea49b..aeb0c48f0 100644 --- a/apis/telemetry/v1beta1/logpipeline_types.go +++ b/apis/telemetry/v1beta1/logpipeline_types.go @@ -27,6 +27,19 @@ const ( FluentBit ) +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&LogPipeline{}, &LogPipelineList{}) +} + +// +kubebuilder:object:root=true +// LogPipelineList contains a list of LogPipeline +type LogPipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []LogPipeline `json:"items"` +} + // +kubebuilder:object:root=true // +kubebuilder:resource:scope=Cluster,categories={kyma-telemetry,kyma-telemetry-pipelines} // +kubebuilder:subresource:status @@ -36,7 +49,6 @@ const ( // +kubebuilder:printcolumn:name="Unsupported Mode",type=boolean,JSONPath=`.status.unsupportedMode` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` // +kubebuilder:storageversion - // LogPipeline is the Schema for the logpipelines API type LogPipeline struct { metav1.TypeMeta `json:",inline"` @@ -50,9 +62,6 @@ type LogPipeline struct { // LogPipelineSpec defines the desired state of LogPipeline type LogPipelineSpec struct { - // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster - // Important: Run "make" to regenerate code after modifying this file - // Defines where to collect logs, including selector mechanisms. Input LogPipelineInput `json:"input,omitempty"` Filters []LogPipelineFilter `json:"filters,omitempty"` @@ -67,7 +76,8 @@ type LogPipelineSpec struct { type LogPipelineInput struct { // Configures in more detail from which containers application logs are enabled as input. Runtime *LogPipelineRuntimeInput `json:"runtime,omitempty"` - OTLP *OTLPInput `json:"otlp,omitempty"` + // Configures an endpoint to receive logs from a OTLP source. + OTLP *OTLPInput `json:"otlp,omitempty"` } // LogPipelineRuntimeInput specifies the default type of Input that handles application logs from runtime containers. It configures in more detail from which containers logs are selected as input. @@ -77,9 +87,9 @@ type LogPipelineRuntimeInput struct { // +kubebuilder:default=true Enabled *bool `json:"enabled,omitempty"` // Describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. System Namespaces are excluded by default from the collection. - Namespaces LogPipelineInputNamespaces `json:"namespaces,omitempty"` + Namespaces LogPipelineNamespaceSelector `json:"namespaces,omitempty"` // Describes whether application logs from specific containers are selected. The options are mutually exclusive. - Containers LogPipelineInputContainers `json:"containers,omitempty"` + Containers LogPipelineContainerSelector `json:"containers,omitempty"` // Defines whether to keep all Kubernetes annotations. The default is `false`. KeepAnnotations bool `json:"keepAnnotations,omitempty"` // Defines whether to drop all Kubernetes labels. The default is `false`. @@ -90,8 +100,8 @@ type LogPipelineRuntimeInput struct { KeepOriginalBody *bool `json:"keepOriginalBody,omitempty"` } -// LogPipelineInputNamespaces describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. 
System Namespaces are excluded by default from the collection.
-type LogPipelineInputNamespaces struct {
+// LogPipelineNamespaceSelector describes whether application logs from specific Namespaces are selected. The options are mutually exclusive. System Namespaces are excluded by default from the collection.
+type LogPipelineNamespaceSelector struct {
 	// Include only the container logs of the specified Namespace names.
 	Include []string `json:"include,omitempty"`
 	// Exclude the container logs of the specified Namespace names.
@@ -100,8 +110,8 @@ type LogPipelineRuntimeInput struct {
 	System bool `json:"system,omitempty"`
 }
-// LogPipelineInputContainers describes whether application logs from specific containers are selected. The options are mutually exclusive.
-type LogPipelineInputContainers struct {
+// LogPipelineContainerSelector describes whether application logs from specific containers are selected. The options are mutually exclusive.
+type LogPipelineContainerSelector struct {
 	// Specifies to include only the container logs with the specified container names.
 	Include []string `json:"include,omitempty"`
 	// Specifies to exclude only the container logs with the specified container names.
@@ -150,20 +160,6 @@ type LogPipelineHTTPOutput struct {
 	Dedot bool `json:"dedot,omitempty"`
 }
-// +kubebuilder:validation:XValidation:rule="has(self.cert) == has(self.key)", message="Can define either both 'cert' and 'key', or neither"
-type OutputTLS struct {
-	// Indicates if TLS is disabled or enabled. Default is `false`.
-	Disabled bool `json:"disabled,omitempty"`
-	// If `true`, the validation of certificates is skipped. Default is `false`.
-	SkipCertificateValidation bool `json:"skipCertificateValidation,omitempty"`
-	// Defines an optional CA certificate for server certificate verification when using TLS. The certificate must be provided in PEM format.
-	CA *ValueType `json:"ca,omitempty"`
-	// Defines a client certificate to use when using TLS. The certificate must be provided in PEM format.
-	Cert *ValueType `json:"cert,omitempty"`
-	// Defines the client key to use when using TLS. The key must be provided in PEM format.
-	Key *ValueType `json:"key,omitempty"`
-}
-
 // Provides file content to be consumed by a LogPipeline configuration
 type LogPipelineFileMount struct {
 	Name string `json:"name,omitempty"`
@@ -185,19 +181,6 @@ type LogPipelineStatus struct {
 	UnsupportedMode *bool `json:"unsupportedMode,omitempty"`
 }
-// +kubebuilder:object:root=true
-// LogPipelineList contains a list of LogPipeline
-type LogPipelineList struct {
-	metav1.TypeMeta `json:",inline"`
-	metav1.ListMeta `json:"metadata,omitempty"`
-	Items []LogPipeline `json:"items"`
-}
-
-//nolint:gochecknoinits // SchemeBuilder's registration is required.
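A usage sketch of the renamed v1beta1 selector types (illustrative only, not part of the patch; the package name and import alias are assumptions). It mirrors the valid include selectors exercised by the updated validation tests further down:

package example

import (
	telemetryv1beta1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1beta1"
)

// runtimeInput selects logs via the renamed selector types; namespace and
// container names are placeholders taken from the test fixtures.
func runtimeInput() telemetryv1beta1.LogPipelineInput {
	return telemetryv1beta1.LogPipelineInput{
		Runtime: &telemetryv1beta1.LogPipelineRuntimeInput{
			Namespaces: telemetryv1beta1.LogPipelineNamespaceSelector{
				Include: []string{"namespace-1", "namespace-2"},
			},
			Containers: telemetryv1beta1.LogPipelineContainerSelector{
				Include: []string{"container-1"},
			},
		},
	}
}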
-func init() { - SchemeBuilder.Register(&LogPipeline{}, &LogPipelineList{}) -} - func (i *LogPipelineInput) IsValid() bool { return i != nil } diff --git a/apis/telemetry/v1beta1/logpipeline_validation_test.go b/apis/telemetry/v1beta1/logpipeline_validation_test.go index dc71ef084..18952ba2d 100644 --- a/apis/telemetry/v1beta1/logpipeline_validation_test.go +++ b/apis/telemetry/v1beta1/logpipeline_validation_test.go @@ -198,10 +198,10 @@ func TestValidateWithValidInputIncludes(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, }, - Containers: LogPipelineInputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1"}, }, }, @@ -217,10 +217,10 @@ func TestValidateWithValidInputExcludes(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Exclude: []string{"namespace-1", "namespace-2"}, }, - Containers: LogPipelineInputContainers{ + Containers: LogPipelineContainerSelector{ Exclude: []string{"container-1"}, }, }, @@ -237,10 +237,10 @@ func TestValidateWithValidInputIncludeContainersSystemFlag(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ System: true, }, - Containers: LogPipelineInputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1"}, }, }, @@ -257,10 +257,10 @@ func TestValidateWithValidInputExcludeContainersSystemFlag(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ System: true, }, - Containers: LogPipelineInputContainers{ + Containers: LogPipelineContainerSelector{ Exclude: []string{"container-1"}, }, }, @@ -277,7 +277,7 @@ func TestValidateWithInvalidNamespaceSelectors(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, Exclude: []string{"namespace-3"}, }, @@ -295,7 +295,7 @@ func TestValidateWithInvalidIncludeSystemFlag(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Include: []string{"namespace-1", "namespace-2"}, System: true, }, @@ -313,7 +313,7 @@ func TestValidateWithInvalidExcludeSystemFlag(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Namespaces: LogPipelineInputNamespaces{ + Namespaces: LogPipelineNamespaceSelector{ Exclude: []string{"namespace-3"}, System: true, }, @@ -331,7 +331,7 @@ func TestValidateWithInvalidContainerSelectors(t *testing.T) { Spec: LogPipelineSpec{ Input: LogPipelineInput{ Runtime: &LogPipelineRuntimeInput{ - Containers: LogPipelineInputContainers{ + Containers: LogPipelineContainerSelector{ Include: []string{"container-1", "container-2"}, Exclude: []string{"container-3"}, }, diff --git a/apis/telemetry/v1beta1/metricpipeline_types.go b/apis/telemetry/v1beta1/metricpipeline_types.go index 1e135756a..19a41b00d 100644 --- 
a/apis/telemetry/v1beta1/metricpipeline_types.go +++ b/apis/telemetry/v1beta1/metricpipeline_types.go @@ -20,6 +20,19 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&MetricPipeline{}, &MetricPipelineList{}) +} + +// +kubebuilder:object:root=true +// MetricPipelineList contains a list of MetricPipeline. +type MetricPipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []MetricPipeline `json:"items"` +} + // +kubebuilder:object:root=true // +kubebuilder:resource:scope=Cluster,categories={kyma-telemetry,kyma-telemetry-pipelines} // +kubebuilder:subresource:status @@ -29,7 +42,6 @@ import ( // +kubebuilder:printcolumn:name="Flow Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="TelemetryFlowHealthy")].status` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` // +kubebuilder:storageversion - // MetricPipeline is the Schema for the metricpipelines API. type MetricPipeline struct { metav1.TypeMeta `json:",inline"` @@ -77,7 +89,7 @@ type MetricPipelinePrometheusInput struct { Namespaces *NamespaceSelector `json:"namespaces,omitempty"` // Configures diagnostic metrics scraping // +optional - DiagnosticMetrics *DiagnosticMetrics `json:"diagnosticMetrics,omitempty"` + DiagnosticMetrics *MetricPipelineIstioInputDiagnosticMetrics `json:"diagnosticMetrics,omitempty"` } // MetricPipelineRuntimeInput defines the runtime scraping section. @@ -139,11 +151,11 @@ type MetricPipelineIstioInput struct { Namespaces *NamespaceSelector `json:"namespaces,omitempty"` // Configures diagnostic metrics scraping // +optional - DiagnosticMetrics *DiagnosticMetrics `json:"diagnosticMetrics,omitempty"` + DiagnosticMetrics *MetricPipelineIstioInputDiagnosticMetrics `json:"diagnosticMetrics,omitempty"` } -// DiagnosticMetrics defines the diagnostic metrics configuration section -type DiagnosticMetrics struct { +// MetricPipelineIstioInputDiagnosticMetrics defines the diagnostic metrics configuration section +type MetricPipelineIstioInputDiagnosticMetrics struct { // If enabled, diagnostic metrics are scraped. The default is `false`. Enabled bool `json:"enabled,omitempty"` } @@ -159,17 +171,3 @@ type MetricPipelineStatus struct { // An array of conditions describing the status of the pipeline. Conditions []metav1.Condition `json:"conditions,omitempty"` } - -// +kubebuilder:object:root=true - -// MetricPipelineList contains a list of MetricPipeline. -type MetricPipelineList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []MetricPipeline `json:"items"` -} - -//nolint:gochecknoinits // SchemeBuilder's registration is required. -func init() { - SchemeBuilder.Register(&MetricPipeline{}, &MetricPipelineList{}) -} diff --git a/apis/telemetry/v1beta1/shared_types.go b/apis/telemetry/v1beta1/shared_types.go index a6121facb..7204cc229 100644 --- a/apis/telemetry/v1beta1/shared_types.go +++ b/apis/telemetry/v1beta1/shared_types.go @@ -48,15 +48,6 @@ func (skr *SecretKeyRef) NamespacedName() types.NamespacedName { return types.NamespacedName{Name: skr.Name, Namespace: skr.Namespace} } -type Header struct { - // Defines the header name. - Name string `json:"name"` - // Defines the header value. - ValueType `json:",inline"` - // Defines an optional header value prefix. The prefix is separated from the value by a space character. 
- Prefix string `json:"prefix,omitempty"` -} - type OTLPProtocol string const ( @@ -98,6 +89,29 @@ type BasicAuthOptions struct { Password ValueType `json:"password"` } +type Header struct { + // Defines the header name. + Name string `json:"name"` + // Defines the header value. + ValueType `json:",inline"` + // Defines an optional header value prefix. The prefix is separated from the value by a space character. + Prefix string `json:"prefix,omitempty"` +} + +// +kubebuilder:validation:XValidation:rule="has(self.cert) == has(self.key)", message="Can define either both 'cert' and 'key', or neither" +type OutputTLS struct { + // Indicates if TLS is disabled or enabled. Default is `false`. + Disabled bool `json:"disabled,omitempty"` + // If `true`, the validation of certificates is skipped. Default is `false`. + SkipCertificateValidation bool `json:"skipCertificateValidation,omitempty"` + // Defines an optional CA certificate for server certificate verification when using TLS. The certificate must be provided in PEM format. + CA *ValueType `json:"ca,omitempty"` + // Defines a client certificate to use when using TLS. The certificate must be provided in PEM format. + Cert *ValueType `json:"cert,omitempty"` + // Defines the client key to use when using TLS. The key must be provided in PEM format. + Key *ValueType `json:"key,omitempty"` +} + // OTLPInput defines the collection of push-based metrics that use the OpenTelemetry protocol. type OTLPInput struct { // If disabled, push-based OTLP metrics are not collected. The default is `false`. diff --git a/apis/telemetry/v1beta1/tracepipeline_types.go b/apis/telemetry/v1beta1/tracepipeline_types.go index 6f92ffd1a..c48846bf7 100644 --- a/apis/telemetry/v1beta1/tracepipeline_types.go +++ b/apis/telemetry/v1beta1/tracepipeline_types.go @@ -20,6 +20,19 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) +//nolint:gochecknoinits // SchemeBuilder's registration is required. +func init() { + SchemeBuilder.Register(&TracePipeline{}, &TracePipelineList{}) +} + +// +kubebuilder:object:root=true +// TracePipelineList contains a list of TracePipeline +type TracePipelineList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []TracePipeline `json:"items"` +} + // +kubebuilder:object:root=true // +kubebuilder:resource:scope=Cluster,categories={kyma-telemetry,kyma-telemetry-pipelines} // +kubebuilder:subresource:status @@ -28,7 +41,6 @@ import ( // +kubebuilder:printcolumn:name="Flow Healthy",type=string,JSONPath=`.status.conditions[?(@.type=="TelemetryFlowHealthy")].status` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` // +kubebuilder:storageversion - // TracePipeline is the Schema for the tracepipelines API type TracePipeline struct { metav1.TypeMeta `json:",inline"` @@ -57,17 +69,3 @@ type TracePipelineStatus struct { // An array of conditions describing the status of the pipeline. Conditions []metav1.Condition `json:"conditions,omitempty"` } - -// +kubebuilder:object:root=true - -// TracePipelineList contains a list of TracePipeline -type TracePipelineList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []TracePipeline `json:"items"` -} - -//nolint:gochecknoinits // SchemeBuilder's registration is required. 
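With OutputTLS now defined in shared_types.go, its CEL rule requires 'cert' and 'key' to be set together. A hedged sketch, not part of the patch, assuming v1beta1 ValueType carries the same Value field as its v1alpha1 counterpart; package name, function name, and PEM contents are placeholders:

package example

import (
	telemetryv1beta1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1beta1"
)

// clientTLS satisfies the XValidation rule on OutputTLS: cert and key are
// provided together (or both omitted).
func clientTLS(certPEM, keyPEM string) telemetryv1beta1.OutputTLS {
	return telemetryv1beta1.OutputTLS{
		Cert: &telemetryv1beta1.ValueType{Value: certPEM},
		Key:  &telemetryv1beta1.ValueType{Value: keyPEM},
	}
}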
-func init() { - SchemeBuilder.Register(&TracePipeline{}, &TracePipelineList{}) -} diff --git a/apis/telemetry/v1beta1/zz_generated.deepcopy.go b/apis/telemetry/v1beta1/zz_generated.deepcopy.go index fa7fe214e..92ef02dac 100644 --- a/apis/telemetry/v1beta1/zz_generated.deepcopy.go +++ b/apis/telemetry/v1beta1/zz_generated.deepcopy.go @@ -62,21 +62,6 @@ func (in *BasicAuthOptions) DeepCopy() *BasicAuthOptions { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DiagnosticMetrics) DeepCopyInto(out *DiagnosticMetrics) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiagnosticMetrics. -func (in *DiagnosticMetrics) DeepCopy() *DiagnosticMetrics { - if in == nil { - return nil - } - out := new(DiagnosticMetrics) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Header) DeepCopyInto(out *Header) { *out = *in @@ -120,6 +105,31 @@ func (in *LogPipeline) DeepCopyObject() runtime.Object { return nil } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LogPipelineContainerSelector) DeepCopyInto(out *LogPipelineContainerSelector) { + *out = *in + if in.Include != nil { + in, out := &in.Include, &out.Include + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Exclude != nil { + in, out := &in.Exclude, &out.Exclude + *out = make([]string, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineContainerSelector. +func (in *LogPipelineContainerSelector) DeepCopy() *LogPipelineContainerSelector { + if in == nil { + return nil + } + out := new(LogPipelineContainerSelector) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *LogPipelineFileMount) DeepCopyInto(out *LogPipelineFileMount) { *out = *in @@ -195,32 +205,39 @@ func (in *LogPipelineInput) DeepCopy() *LogPipelineInput { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogPipelineInputContainers) DeepCopyInto(out *LogPipelineInputContainers) { +func (in *LogPipelineList) DeepCopyInto(out *LogPipelineList) { *out = *in - if in.Include != nil { - in, out := &in.Include, &out.Include - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Exclude != nil { - in, out := &in.Exclude, &out.Exclude - *out = make([]string, len(*in)) - copy(*out, *in) + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]LogPipeline, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineInputContainers. -func (in *LogPipelineInputContainers) DeepCopy() *LogPipelineInputContainers { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineList. 
+func (in *LogPipelineList) DeepCopy() *LogPipelineList { if in == nil { return nil } - out := new(LogPipelineInputContainers) + out := new(LogPipelineList) in.DeepCopyInto(out) return out } +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *LogPipelineList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogPipelineInputNamespaces) DeepCopyInto(out *LogPipelineInputNamespaces) { +func (in *LogPipelineNamespaceSelector) DeepCopyInto(out *LogPipelineNamespaceSelector) { *out = *in if in.Include != nil { in, out := &in.Include, &out.Include @@ -234,48 +251,16 @@ func (in *LogPipelineInputNamespaces) DeepCopyInto(out *LogPipelineInputNamespac } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineInputNamespaces. -func (in *LogPipelineInputNamespaces) DeepCopy() *LogPipelineInputNamespaces { - if in == nil { - return nil - } - out := new(LogPipelineInputNamespaces) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogPipelineList) DeepCopyInto(out *LogPipelineList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]LogPipeline, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineList. -func (in *LogPipelineList) DeepCopy() *LogPipelineList { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogPipelineNamespaceSelector. +func (in *LogPipelineNamespaceSelector) DeepCopy() *LogPipelineNamespaceSelector { if in == nil { return nil } - out := new(LogPipelineList) + out := new(LogPipelineNamespaceSelector) in.DeepCopyInto(out) return out } -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *LogPipelineList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *LogPipelineOutput) DeepCopyInto(out *LogPipelineOutput) { *out = *in @@ -477,7 +462,7 @@ func (in *MetricPipelineIstioInput) DeepCopyInto(out *MetricPipelineIstioInput) } if in.DiagnosticMetrics != nil { in, out := &in.DiagnosticMetrics, &out.DiagnosticMetrics - *out = new(DiagnosticMetrics) + *out = new(MetricPipelineIstioInputDiagnosticMetrics) **out = **in } } @@ -492,6 +477,21 @@ func (in *MetricPipelineIstioInput) DeepCopy() *MetricPipelineIstioInput { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MetricPipelineIstioInputDiagnosticMetrics) DeepCopyInto(out *MetricPipelineIstioInputDiagnosticMetrics) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricPipelineIstioInputDiagnosticMetrics. 
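The deepcopy changes above reflect that both the Istio and the Prometheus metric inputs now reference the renamed MetricPipelineIstioInputDiagnosticMetrics type. A minimal sketch (illustrative only, not part of the patch; package name and function name are made up):

package example

import (
	telemetryv1beta1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1beta1"
)

// istioInputWithDiagnostics enables diagnostic metrics scraping via the renamed
// type shared by the Istio and Prometheus inputs.
func istioInputWithDiagnostics() *telemetryv1beta1.MetricPipelineIstioInput {
	return &telemetryv1beta1.MetricPipelineIstioInput{
		DiagnosticMetrics: &telemetryv1beta1.MetricPipelineIstioInputDiagnosticMetrics{
			Enabled: true,
		},
	}
}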
+func (in *MetricPipelineIstioInputDiagnosticMetrics) DeepCopy() *MetricPipelineIstioInputDiagnosticMetrics { + if in == nil { + return nil + } + out := new(MetricPipelineIstioInputDiagnosticMetrics) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MetricPipelineList) DeepCopyInto(out *MetricPipelineList) { *out = *in @@ -554,7 +554,7 @@ func (in *MetricPipelinePrometheusInput) DeepCopyInto(out *MetricPipelinePrometh } if in.DiagnosticMetrics != nil { in, out := &in.DiagnosticMetrics, &out.DiagnosticMetrics - *out = new(DiagnosticMetrics) + *out = new(MetricPipelineIstioInputDiagnosticMetrics) **out = **in } } diff --git a/config/development/crd/bases/telemetry.kyma-project.io_logpipelines.yaml b/config/development/crd/bases/telemetry.kyma-project.io_logpipelines.yaml index f8774798c..709c2086f 100644 --- a/config/development/crd/bases/telemetry.kyma-project.io_logpipelines.yaml +++ b/config/development/crd/bases/telemetry.kyma-project.io_logpipelines.yaml @@ -941,8 +941,8 @@ spec: description: Defines where to collect logs, including selector mechanisms. properties: otlp: - description: OTLPInput defines the collection of push-based metrics - that use the OpenTelemetry protocol. + description: Configures an endpoint to receive logs from a OTLP + source. properties: disabled: description: If disabled, push-based OTLP metrics are not diff --git a/internal/fluentbit/config/builder/config_builder_test.go b/internal/fluentbit/config/builder/config_builder_test.go index ab6645e32..dc9c03ef3 100644 --- a/internal/fluentbit/config/builder/config_builder_test.go +++ b/internal/fluentbit/config/builder/config_builder_test.go @@ -36,8 +36,8 @@ func TestCreateLuaDedotFilterWithDefinedHostAndDedotSet(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "foo"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Output: telemetryv1alpha1.Output{ - HTTP: &telemetryv1alpha1.HTTPOutput{ + Output: telemetryv1alpha1.LogPipelineOutput{ + HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{ Dedot: true, Host: telemetryv1alpha1.ValueType{Value: "localhost"}, }, @@ -52,8 +52,8 @@ func TestCreateLuaDedotFilterWithDefinedHostAndDedotSet(t *testing.T) { func TestCreateLuaDedotFilterWithUndefinedHost(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Output: telemetryv1alpha1.Output{ - HTTP: &telemetryv1alpha1.HTTPOutput{Dedot: true}, + Output: telemetryv1alpha1.LogPipelineOutput{ + HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{Dedot: true}, }, }, } @@ -65,8 +65,8 @@ func TestCreateLuaDedotFilterWithUndefinedHost(t *testing.T) { func TestCreateLuaDedotFilterWithDedotFalse(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Output: telemetryv1alpha1.Output{ - HTTP: &telemetryv1alpha1.HTTPOutput{ + Output: telemetryv1alpha1.LogPipelineOutput{ + HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{ Dedot: false, Host: telemetryv1alpha1.ValueType{Value: "localhost"}, }, @@ -141,12 +141,12 @@ func TestMergeSectionsConfig(t *testing.T) { ` logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Containers: telemetryv1alpha1.InputContainers{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + 
Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Exclude: []string{"container1", "container2"}, }, - Namespaces: telemetryv1alpha1.InputNamespaces{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ System: true, }, KeepAnnotations: true, @@ -154,7 +154,7 @@ func TestMergeSectionsConfig(t *testing.T) { KeepOriginalBody: ptr.To(true), }, }, - Filters: []telemetryv1alpha1.Filter{ + Filters: []telemetryv1alpha1.LogPipelineFilter{ { Custom: ` name grep @@ -168,8 +168,8 @@ func TestMergeSectionsConfig(t *testing.T) { `, }, }, - Output: telemetryv1alpha1.Output{ - HTTP: &telemetryv1alpha1.HTTPOutput{ + Output: telemetryv1alpha1.LogPipelineOutput{ + HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{ Dedot: true, Host: telemetryv1alpha1.ValueType{ Value: "localhost", @@ -231,17 +231,17 @@ func TestMergeSectionsConfigCustomOutput(t *testing.T) { ` logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ KeepAnnotations: true, DropLabels: false, KeepOriginalBody: ptr.To(true), - Namespaces: telemetryv1alpha1.InputNamespaces{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ System: true, }, }, }, - Output: telemetryv1alpha1.Output{ + Output: telemetryv1alpha1.LogPipelineOutput{ Custom: ` name stdout`, }, @@ -318,7 +318,7 @@ func TestBuildFluentBitConfig_Validation(t *testing.T) { args: args{ pipeline: func() *telemetryv1alpha1.LogPipeline { lp := testutils.NewLogPipelineBuilder().Build() - lp.Spec.Output = telemetryv1alpha1.Output{} + lp.Spec.Output = telemetryv1alpha1.LogPipelineOutput{} return &lp }(), }, diff --git a/internal/fluentbit/config/builder/custom_filter_test.go b/internal/fluentbit/config/builder/custom_filter_test.go index 87ca2253e..5caece8fd 100644 --- a/internal/fluentbit/config/builder/custom_filter_test.go +++ b/internal/fluentbit/config/builder/custom_filter_test.go @@ -13,7 +13,7 @@ func TestCreateCustomFilters(t *testing.T) { testPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "foo"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Filters: []telemetryv1alpha1.Filter{ + Filters: []telemetryv1alpha1.LogPipelineFilter{ { Custom: ` name multiline diff --git a/internal/fluentbit/config/builder/input_test.go b/internal/fluentbit/config/builder/input_test.go index 42dc919cb..24c94ff79 100644 --- a/internal/fluentbit/config/builder/input_test.go +++ b/internal/fluentbit/config/builder/input_test.go @@ -30,7 +30,7 @@ func TestCreateInput(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "test-logpipeline"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{}, + Input: telemetryv1alpha1.LogPipelineInput{}, }, } @@ -83,9 +83,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include system", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ System: true, }, }, @@ -104,9 +104,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include foo namespace", &telemetryv1alpha1.LogPipeline{ Spec: 
telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Include: []string{ "foo", }, @@ -127,9 +127,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include foo container", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Containers: telemetryv1alpha1.InputContainers{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Include: []string{ "foo", }, @@ -154,14 +154,14 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include foo namespace and bar container", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Include: []string{ "foo", }, }, - Containers: telemetryv1alpha1.InputContainers{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Include: []string{ "bar", }, @@ -182,15 +182,15 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include foo and bar namespace, include istio-proxy container", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Include: []string{ "foo", "bar", }, }, - Containers: telemetryv1alpha1.InputContainers{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Include: []string{ "istio-proxy", }, @@ -213,9 +213,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "exclude foo namespace", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Exclude: []string{ "foo", }, @@ -237,12 +237,12 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include system, exclude foo container", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ System: true, }, - Containers: telemetryv1alpha1.InputContainers{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Exclude: []string{ "foo", }, @@ -264,9 +264,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "exclude foo container", &telemetryv1alpha1.LogPipeline{ Spec: 
telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Containers: telemetryv1alpha1.InputContainers{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Exclude: []string{ "foo", }, @@ -292,14 +292,14 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "exclude foo namespace, exclude bar container", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Exclude: []string{ "foo", }, }, - Containers: telemetryv1alpha1.InputContainers{ + Containers: telemetryv1alpha1.LogPipelineContainerSelector{ Exclude: []string{ "bar", }, @@ -322,9 +322,9 @@ func TestCreateIncludeAndExcludePath(t *testing.T) { "include system and foo namespaces", &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ - Namespaces: telemetryv1alpha1.InputNamespaces{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ + Namespaces: telemetryv1alpha1.LogPipelineNamespaceSelector{ Include: []string{ "kyma-system", "kube-system", diff --git a/internal/fluentbit/config/builder/kubernetes_filter.go b/internal/fluentbit/config/builder/kubernetes_filter.go index 123624ae7..1b5be50a3 100644 --- a/internal/fluentbit/config/builder/kubernetes_filter.go +++ b/internal/fluentbit/config/builder/kubernetes_filter.go @@ -7,7 +7,7 @@ import ( ) func createKubernetesFilter(pipeline *telemetryv1alpha1.LogPipeline) string { - appInput := &telemetryv1alpha1.ApplicationInput{} + appInput := &telemetryv1alpha1.LogPipelineApplicationInput{} if pipeline.Spec.Input.Application != nil { appInput = pipeline.Spec.Input.Application } diff --git a/internal/fluentbit/config/builder/kubernetes_filter_test.go b/internal/fluentbit/config/builder/kubernetes_filter_test.go index f5fbe81b8..6a7d095ac 100644 --- a/internal/fluentbit/config/builder/kubernetes_filter_test.go +++ b/internal/fluentbit/config/builder/kubernetes_filter_test.go @@ -27,8 +27,8 @@ func TestCreateKubernetesFilterKeepAnnotations(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "test-logpipeline"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ KeepAnnotations: true, }}}} @@ -53,8 +53,8 @@ func TestCreateKubernetesFilterDropLabels(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "test-logpipeline"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ DropLabels: true, }}}} @@ -79,8 +79,8 @@ func TestCreateKubernetesFilterKeepOriginalBodyTrue(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "test-logpipeline"}, Spec: 
telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ KeepOriginalBody: ptr.To(true), }}}} @@ -105,8 +105,8 @@ func TestCreateKubernetesFilterKeepOriginalBodyFalse(t *testing.T) { logPipeline := &telemetryv1alpha1.LogPipeline{ ObjectMeta: metav1.ObjectMeta{Name: "test-logpipeline"}, Spec: telemetryv1alpha1.LogPipelineSpec{ - Input: telemetryv1alpha1.Input{ - Application: &telemetryv1alpha1.ApplicationInput{ + Input: telemetryv1alpha1.LogPipelineInput{ + Application: &telemetryv1alpha1.LogPipelineApplicationInput{ KeepOriginalBody: ptr.To(false), }}}} diff --git a/internal/fluentbit/config/builder/output.go b/internal/fluentbit/config/builder/output.go index c8bdc31bf..ccdaf8a67 100644 --- a/internal/fluentbit/config/builder/output.go +++ b/internal/fluentbit/config/builder/output.go @@ -25,7 +25,7 @@ func createOutputSection(pipeline *telemetryv1alpha1.LogPipeline, defaults Pipel return "" } -func generateCustomOutput(output *telemetryv1alpha1.Output, fsBufferLimit string, name string) string { +func generateCustomOutput(output *telemetryv1alpha1.LogPipelineOutput, fsBufferLimit string, name string) string { sb := NewOutputSectionBuilder() customOutputParams := parseMultiline(output.Custom) aliasPresent := customOutputParams.ContainsKey("alias") @@ -45,7 +45,7 @@ func generateCustomOutput(output *telemetryv1alpha1.Output, fsBufferLimit string return sb.Build() } -func generateHTTPOutput(httpOutput *telemetryv1alpha1.HTTPOutput, fsBufferLimit string, name string) string { +func generateHTTPOutput(httpOutput *telemetryv1alpha1.LogPipelineHTTPOutput, fsBufferLimit string, name string) string { sb := NewOutputSectionBuilder() sb.AddConfigParam("name", "http") sb.AddConfigParam("allow_duplicated_headers", "true") @@ -74,28 +74,28 @@ func generateHTTPOutput(httpOutput *telemetryv1alpha1.HTTPOutput, fsBufferLimit } tlsEnabled := "on" - if httpOutput.TLSConfig.Disabled { + if httpOutput.TLS.Disabled { tlsEnabled = "off" } sb.AddConfigParam("tls", tlsEnabled) tlsVerify := "on" - if httpOutput.TLSConfig.SkipCertificateValidation { + if httpOutput.TLS.SkipCertificateValidation { tlsVerify = "off" } sb.AddConfigParam("tls.verify", tlsVerify) - if httpOutput.TLSConfig.CA.IsValid() { + if httpOutput.TLS.CA.IsValid() { sb.AddConfigParam("tls.ca_file", fmt.Sprintf("/fluent-bit/etc/output-tls-config/%s-ca.crt", name)) } - if httpOutput.TLSConfig.Cert.IsValid() { + if httpOutput.TLS.Cert.IsValid() { sb.AddConfigParam("tls.crt_file", fmt.Sprintf("/fluent-bit/etc/output-tls-config/%s-cert.crt", name)) } - if httpOutput.TLSConfig.Key.IsValid() { + if httpOutput.TLS.Key.IsValid() { sb.AddConfigParam("tls.key_file", fmt.Sprintf("/fluent-bit/etc/output-tls-config/%s-key.key", name)) } diff --git a/internal/fluentbit/config/builder/output_test.go b/internal/fluentbit/config/builder/output_test.go index e1d313645..7be9e40f4 100644 --- a/internal/fluentbit/config/builder/output_test.go +++ b/internal/fluentbit/config/builder/output_test.go @@ -20,7 +20,7 @@ func TestCreateOutputSectionWithCustomOutput(t *testing.T) { ` logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: telemetryv1alpha1.LogPipelineSpec{ - Output: telemetryv1alpha1.Output{ + Output: telemetryv1alpha1.LogPipelineOutput{ Custom: ` name null`, }, @@ -54,8 +54,8 @@ func TestCreateOutputSectionWithHTTPOutput(t *testing.T) { ` logPipeline := &telemetryv1alpha1.LogPipeline{ Spec: 
telemetryv1alpha1.LogPipelineSpec{
-			Output: telemetryv1alpha1.Output{
-				HTTP: &telemetryv1alpha1.HTTPOutput{
+			Output: telemetryv1alpha1.LogPipelineOutput{
+				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 					Dedot: true,
 					Port: "1234",
 					Host: telemetryv1alpha1.ValueType{Value: "localhost"},
@@ -95,8 +95,8 @@ func TestCreateOutputSectionWithHTTPOutputWithSecretReference(t *testing.T) {
 `
 	logPipeline := &telemetryv1alpha1.LogPipeline{
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Output: telemetryv1alpha1.Output{
-				HTTP: &telemetryv1alpha1.HTTPOutput{
+			Output: telemetryv1alpha1.LogPipelineOutput{
+				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 					Dedot: true,
 					URI: "/my-uri",
 					Host: telemetryv1alpha1.ValueType{Value: "localhost"},
@@ -144,12 +144,12 @@ func TestCreateOutputSectionWithHTTPOutputWithTLS(t *testing.T) {
 	logPipeline := &telemetryv1alpha1.LogPipeline{
 		ObjectMeta: metav1.ObjectMeta{Name: "foo"},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Output: telemetryv1alpha1.Output{
-				HTTP: &telemetryv1alpha1.HTTPOutput{
+			Output: telemetryv1alpha1.LogPipelineOutput{
+				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 					Dedot: true,
 					URI: "/my-uri",
 					Host: telemetryv1alpha1.ValueType{Value: "localhost"},
-					TLSConfig: telemetryv1alpha1.TLSConfig{
+					TLS: telemetryv1alpha1.LogPipelineOutputTLS{
 						Disabled: false,
 						SkipCertificateValidation: false,
 						CA: &telemetryv1alpha1.ValueType{Value: "fake-ca-value"},
diff --git a/internal/fluentbit/config/builder/rewritetag_filter.go b/internal/fluentbit/config/builder/rewritetag_filter.go
index d11b492aa..e81cabf1c 100644
--- a/internal/fluentbit/config/builder/rewritetag_filter.go
+++ b/internal/fluentbit/config/builder/rewritetag_filter.go
@@ -7,7 +7,7 @@ import (
 	telemetryv1alpha1 "github.com/kyma-project/telemetry-manager/apis/telemetry/v1alpha1"
 )
 
-func getEmitterPostfixByOutput(output *telemetryv1alpha1.Output) string {
+func getEmitterPostfixByOutput(output *telemetryv1alpha1.LogPipelineOutput) string {
 	if output.IsHTTPDefined() {
 		return "http"
 	}
@@ -42,7 +42,7 @@ func createRewriteTagFilter(logPipeline *telemetryv1alpha1.LogPipeline, defaults
 		AddConfigParam("Emitter_Storage.type", defaults.StorageType).
 		AddConfigParam("Emitter_Mem_Buf_Limit", defaults.MemoryBufferLimit)
 
-	var containers telemetryv1alpha1.InputContainers
+	var containers telemetryv1alpha1.LogPipelineContainerSelector
 	if logPipeline.Spec.Input.Application != nil {
 		containers = logPipeline.Spec.Input.Application.Containers
 	}
diff --git a/internal/fluentbit/config/builder/rewritetag_filter_test.go b/internal/fluentbit/config/builder/rewritetag_filter_test.go
index 5925884ef..08d7b7b7c 100644
--- a/internal/fluentbit/config/builder/rewritetag_filter_test.go
+++ b/internal/fluentbit/config/builder/rewritetag_filter_test.go
@@ -21,8 +21,8 @@ func TestCreateRewriteTagFilterIncludeContainers(t *testing.T) {
 			Name: "logpipeline1",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Input: telemetryv1alpha1.Input{Application: &telemetryv1alpha1.ApplicationInput{
-				Containers: telemetryv1alpha1.InputContainers{
+			Input: telemetryv1alpha1.LogPipelineInput{Application: &telemetryv1alpha1.LogPipelineApplicationInput{
+				Containers: telemetryv1alpha1.LogPipelineContainerSelector{
 					Include: []string{"container1", "container2"}}}}}}
 
 	expected := `[FILTER]
@@ -50,8 +50,8 @@ func TestCreateRewriteTagFilterExcludeContainers(t *testing.T) {
 			Name: "logpipeline1",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Input: telemetryv1alpha1.Input{Application: &telemetryv1alpha1.ApplicationInput{
-				Containers: telemetryv1alpha1.InputContainers{
+			Input: telemetryv1alpha1.LogPipelineInput{Application: &telemetryv1alpha1.LogPipelineApplicationInput{
+				Containers: telemetryv1alpha1.LogPipelineContainerSelector{
 					Exclude: []string{"container1", "container2"}}}}}}
 
 	expected := `[FILTER]
@@ -79,7 +79,7 @@ func TestCreateRewriteTagFilterWithCustomOutput(t *testing.T) {
 			Name: "logpipeline1",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Output: telemetryv1alpha1.Output{
+			Output: telemetryv1alpha1.LogPipelineOutput{
 				Custom: `
 name stdout`,
 			},
diff --git a/internal/reconciler/logpipeline/fluentbit/finalizer_test.go b/internal/reconciler/logpipeline/fluentbit/finalizer_test.go
index fd8493551..65d683bb6 100644
--- a/internal/reconciler/logpipeline/fluentbit/finalizer_test.go
+++ b/internal/reconciler/logpipeline/fluentbit/finalizer_test.go
@@ -35,7 +35,7 @@ func TestEnsureFinalizers(t *testing.T) {
 	pipeline := &telemetryv1alpha1.LogPipeline{
 		ObjectMeta: metav1.ObjectMeta{Name: "pipeline"},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{
 				{
 					Name: "script.js",
 					Content: "",
diff --git a/internal/reconciler/logpipeline/fluentbit/sync.go b/internal/reconciler/logpipeline/fluentbit/sync.go
index 4a73f20a1..674831b01 100644
--- a/internal/reconciler/logpipeline/fluentbit/sync.go
+++ b/internal/reconciler/logpipeline/fluentbit/sync.go
@@ -188,7 +188,7 @@ func (s *syncer) syncTLSConfigSecret(ctx context.Context, logPipelines []telemet
 			continue
 		}
 
-		tlsConfig := output.HTTP.TLSConfig
+		tlsConfig := output.HTTP.TLS
 		if tlsConfig.CA.IsValid() {
 			targetKey := fmt.Sprintf("%s-ca.crt", logPipelines[i].Name)
 			if err := s.copyFromValueOrSecret(ctx, *tlsConfig.CA, targetKey, newSecret.Data); err != nil {
diff --git a/internal/reconciler/logpipeline/fluentbit/sync_test.go b/internal/reconciler/logpipeline/fluentbit/sync_test.go
index 4d5c10bba..f864ee89e 100644
--- a/internal/reconciler/logpipeline/fluentbit/sync_test.go
+++ b/internal/reconciler/logpipeline/fluentbit/sync_test.go
@@ -37,7 +37,7 @@ func TestSyncSectionsConfigMap(t *testing.T) {
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -68,7 +68,7 @@ alias foo`,
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -105,7 +105,7 @@ alias bar`
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -161,11 +161,11 @@ func TestSyncFilesConfigMap(t *testing.T) {
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Files: []telemetryv1alpha1.FileMount{
+				Files: []telemetryv1alpha1.LogPipelineFileMount{
 					{Name: "lua-script", Content: "here comes some lua code"},
 					{Name: "js-script", Content: "here comes some js code"},
 				},
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -194,10 +194,10 @@ alias foo`,
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Files: []telemetryv1alpha1.FileMount{
+				Files: []telemetryv1alpha1.LogPipelineFileMount{
 					{Name: "lua-script", Content: "here comes some lua code"},
 				},
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -227,10 +227,10 @@ alias foo`,
 				Name: "noop",
 			},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Files: []telemetryv1alpha1.FileMount{
+				Files: []telemetryv1alpha1.LogPipelineFileMount{
 					{Name: "lua-script", Content: "here comes some lua code"},
 				},
-				Output: telemetryv1alpha1.Output{
+				Output: telemetryv1alpha1.LogPipelineOutput{
 					Custom: `
 name null
 alias foo`,
@@ -272,8 +272,8 @@ func TestSyncEnvSecret(t *testing.T) {
 		{
 			ObjectMeta: metav1.ObjectMeta{Name: "http"},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Output: telemetryv1alpha1.Output{
-					HTTP: &telemetryv1alpha1.HTTPOutput{
+				Output: telemetryv1alpha1.LogPipelineOutput{
+					HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 						Host: telemetryv1alpha1.ValueType{Value: "localhost"},
 						User: telemetryv1alpha1.ValueType{Value: "admin"},
 						Password: telemetryv1alpha1.ValueType{
@@ -382,10 +382,10 @@ func TestSyncTLSConfigSecret(t *testing.T) {
 		{
 			ObjectMeta: metav1.ObjectMeta{Name: "pipeline-1"},
 			Spec: telemetryv1alpha1.LogPipelineSpec{
-				Output: telemetryv1alpha1.Output{
-					HTTP: &telemetryv1alpha1.HTTPOutput{
+				Output: telemetryv1alpha1.LogPipelineOutput{
+					HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 						Host: telemetryv1alpha1.ValueType{Value: "localhost"},
-						TLSConfig: telemetryv1alpha1.TLSConfig{
+						TLS: telemetryv1alpha1.LogPipelineOutputTLS{
 							Disabled: false,
 							SkipCertificateValidation: false,
 							CA: &telemetryv1alpha1.ValueType{
diff --git a/internal/reconciler/logpipeline/fluentbit/validator.go b/internal/reconciler/logpipeline/fluentbit/validator.go
index 0e1a450a5..eb35634dc 100644
--- a/internal/reconciler/logpipeline/fluentbit/validator.go
+++ b/internal/reconciler/logpipeline/fluentbit/validator.go
@@ -40,9 +40,9 @@ func (v *Validator) validate(ctx context.Context, pipeline *telemetryv1alpha1.Lo
 
 	if tlsValidationRequired(pipeline) {
 		tlsConfig := tlscert.TLSBundle{
-			Cert: pipeline.Spec.Output.HTTP.TLSConfig.Cert,
-			Key: pipeline.Spec.Output.HTTP.TLSConfig.Key,
-			CA: pipeline.Spec.Output.HTTP.TLSConfig.CA,
+			Cert: pipeline.Spec.Output.HTTP.TLS.Cert,
+			Key: pipeline.Spec.Output.HTTP.TLS.Key,
+			CA: pipeline.Spec.Output.HTTP.TLS.CA,
 		}
 
 		if err := v.TLSCertValidator.Validate(ctx, tlsConfig); err != nil {
@@ -59,5 +59,5 @@ func tlsValidationRequired(pipeline *telemetryv1alpha1.LogPipeline) bool {
 		return false
 	}
 
-	return http.TLSConfig.Cert != nil || http.TLSConfig.Key != nil || http.TLSConfig.CA != nil
+	return http.TLS.Cert != nil || http.TLS.Key != nil || http.TLS.CA != nil
 }
diff --git a/internal/reconciler/logpipeline/reconciler_test.go b/internal/reconciler/logpipeline/reconciler_test.go
index cbb167848..79f13c4c8 100644
--- a/internal/reconciler/logpipeline/reconciler_test.go
+++ b/internal/reconciler/logpipeline/reconciler_test.go
@@ -32,7 +32,7 @@ func TestGetOutputType(t *testing.T) {
 			args: args{
 				&telemetryv1alpha1.LogPipeline{
 					Spec: telemetryv1alpha1.LogPipelineSpec{
-						Output: telemetryv1alpha1.Output{
+						Output: telemetryv1alpha1.LogPipelineOutput{
 							OTLP: &telemetryv1alpha1.OTLPOutput{},
 						},
 					},
@@ -46,8 +46,8 @@ func TestGetOutputType(t *testing.T) {
 			args: args{
 				&telemetryv1alpha1.LogPipeline{
 					Spec: telemetryv1alpha1.LogPipelineSpec{
-						Output: telemetryv1alpha1.Output{
-							HTTP: &telemetryv1alpha1.HTTPOutput{},
+						Output: telemetryv1alpha1.LogPipelineOutput{
+							HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{},
 						},
 					},
 				},
@@ -60,7 +60,7 @@ func TestGetOutputType(t *testing.T) {
 			args: args{
 				&telemetryv1alpha1.LogPipeline{
 					Spec: telemetryv1alpha1.LogPipelineSpec{
-						Output: telemetryv1alpha1.Output{
+						Output: telemetryv1alpha1.LogPipelineOutput{
 							Custom: "custom",
 						},
 					},
diff --git a/internal/testutils/log_pipeline_builder.go b/internal/testutils/log_pipeline_builder.go
index 852899fe6..ed0f6140f 100644
--- a/internal/testutils/log_pipeline_builder.go
+++ b/internal/testutils/log_pipeline_builder.go
@@ -20,11 +20,11 @@ type LogPipelineBuilder struct {
 	finalizers []string
 	deletionTimeStamp metav1.Time
 
-	input telemetryv1alpha1.Input
+	input telemetryv1alpha1.LogPipelineInput
 
-	filters []telemetryv1alpha1.Filter
+	filters []telemetryv1alpha1.LogPipelineFilter
 
-	httpOutput *telemetryv1alpha1.HTTPOutput
+	httpOutput *telemetryv1alpha1.LogPipelineHTTPOutput
 	otlpOutput *telemetryv1alpha1.OTLPOutput
 	customOutput string
 
@@ -54,7 +54,7 @@ func (b *LogPipelineBuilder) WithFinalizer(finalizer string) *LogPipelineBuilder
 
 func (b *LogPipelineBuilder) WithApplicationInputDisabled() *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Enabled = ptr.To(false)
@@ -72,7 +72,7 @@ func (b *LogPipelineBuilder) WithOTLPInput() *LogPipelineBuilder {
 
 func (b *LogPipelineBuilder) WithIncludeContainers(containers ...string) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Containers.Include = containers
@@ -82,7 +82,7 @@ func (b *LogPipelineBuilder) WithIncludeContainers(containers ...string) *LogPip
 
 func (b *LogPipelineBuilder) WithExcludeContainers(containers ...string) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Containers.Exclude = containers
@@ -92,7 +92,7 @@ func (b *LogPipelineBuilder) WithExcludeContainers(containers ...string) *LogPip
 
 func (b *LogPipelineBuilder) WithIncludeNamespaces(namespaces ...string) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Namespaces.Include = namespaces
@@ -102,7 +102,7 @@ func (b *LogPipelineBuilder) WithIncludeNamespaces(namespaces ...string) *LogPip
 
 func (b *LogPipelineBuilder) WithExcludeNamespaces(namespaces ...string) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Namespaces.Exclude = namespaces
@@ -112,7 +112,7 @@ func (b *LogPipelineBuilder) WithExcludeNamespaces(namespaces ...string) *LogPip
 
 func (b *LogPipelineBuilder) WithSystemNamespaces(enable bool) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.Namespaces.System = enable
@@ -122,7 +122,7 @@ func (b *LogPipelineBuilder) WithSystemNamespaces(enable bool) *LogPipelineBuild
 
 func (b *LogPipelineBuilder) WithKeepAnnotations(keep bool) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.KeepAnnotations = keep
@@ -132,7 +132,7 @@ func (b *LogPipelineBuilder) WithKeepAnnotations(keep bool) *LogPipelineBuilder
 
 func (b *LogPipelineBuilder) WithDropLabels(drop bool) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.DropLabels = drop
@@ -142,7 +142,7 @@ func (b *LogPipelineBuilder) WithDropLabels(drop bool) *LogPipelineBuilder {
 
 func (b *LogPipelineBuilder) WithKeepOriginalBody(keep bool) *LogPipelineBuilder {
 	if b.input.Application == nil {
-		b.input.Application = &telemetryv1alpha1.ApplicationInput{}
+		b.input.Application = &telemetryv1alpha1.LogPipelineApplicationInput{}
 	}
 
 	b.input.Application.KeepOriginalBody = ptr.To(keep)
@@ -151,7 +151,7 @@ func (b *LogPipelineBuilder) WithKeepOriginalBody(keep bool) *LogPipelineBuilder
 }
 
 func (b *LogPipelineBuilder) WithCustomFilter(filter string) *LogPipelineBuilder {
-	b.filters = append(b.filters, telemetryv1alpha1.Filter{Custom: filter})
+	b.filters = append(b.filters, telemetryv1alpha1.LogPipelineFilter{Custom: filter})
 
 	return b
 }
@@ -211,7 +211,7 @@ func (b *LogPipelineBuilder) Build() telemetryv1alpha1.LogPipeline {
 		Spec: telemetryv1alpha1.LogPipelineSpec{
 			Input: b.input,
 			Filters: b.filters,
-			Output: telemetryv1alpha1.Output{
+			Output: telemetryv1alpha1.LogPipelineOutput{
 				HTTP: b.httpOutput,
 				Custom: b.customOutput,
 				OTLP: b.otlpOutput,
@@ -228,13 +228,13 @@ func (b *LogPipelineBuilder) Build() telemetryv1alpha1.LogPipeline {
 	return logPipeline
 }
 
-func defaultHTTPOutput() *telemetryv1alpha1.HTTPOutput {
-	return &telemetryv1alpha1.HTTPOutput{
+func defaultHTTPOutput() *telemetryv1alpha1.LogPipelineHTTPOutput {
+	return &telemetryv1alpha1.LogPipelineHTTPOutput{
 		Host: telemetryv1alpha1.ValueType{Value: "127.0.0.1"},
 		Port: "8080",
 		URI: "/",
 		Format: "json",
-		TLSConfig: telemetryv1alpha1.TLSConfig{
+		TLS: telemetryv1alpha1.LogPipelineOutputTLS{
 			Disabled: true,
 			SkipCertificateValidation: true,
 		},
diff --git a/internal/testutils/metric_pipeline_builder.go b/internal/testutils/metric_pipeline_builder.go
index a6c055bd3..057f42766 100644
--- a/internal/testutils/metric_pipeline_builder.go
+++ b/internal/testutils/metric_pipeline_builder.go
@@ -161,7 +161,7 @@ func (b *MetricPipelineBuilder) WithPrometheusInputDiagnosticMetrics(enable bool
 	}
 
 	if b.inPrometheus.DiagnosticMetrics == nil {
-		b.inPrometheus.DiagnosticMetrics = &telemetryv1alpha1.DiagnosticMetrics{}
+		b.inPrometheus.DiagnosticMetrics = &telemetryv1alpha1.MetricPipelineIstioInputDiagnosticMetrics{}
 	}
 
 	b.inPrometheus.DiagnosticMetrics.Enabled = enable
@@ -175,7 +175,7 @@ func (b *MetricPipelineBuilder) WithIstioInputDiagnosticMetrics(enable bool) *Me
 	}
 
 	if b.inIstio.DiagnosticMetrics == nil {
-		b.inIstio.DiagnosticMetrics = &telemetryv1alpha1.DiagnosticMetrics{}
+		b.inIstio.DiagnosticMetrics = &telemetryv1alpha1.MetricPipelineIstioInputDiagnosticMetrics{}
 	}
 
 	b.inIstio.DiagnosticMetrics.Enabled = enable
diff --git a/internal/testutils/pipeline_opts.go b/internal/testutils/pipeline_opts.go
index 5bc171811..1a04b971c 100644
--- a/internal/testutils/pipeline_opts.go
+++ b/internal/testutils/pipeline_opts.go
@@ -106,11 +106,11 @@ func OTLPEndpointPath(path string) OTLPOutputOption {
 	}
 }
 
-type HTTPOutputOption func(output *telemetryv1alpha1.HTTPOutput)
+type HTTPOutputOption func(output *telemetryv1alpha1.LogPipelineHTTPOutput)
 
 func HTTPClientTLSFromString(ca, cert, key string) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
-		output.TLSConfig = telemetryv1alpha1.TLSConfig{
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
+		output.TLS = telemetryv1alpha1.LogPipelineOutputTLS{
 			CA: &telemetryv1alpha1.ValueType{Value: ca},
 			Cert: &telemetryv1alpha1.ValueType{Value: cert},
 			Key: &telemetryv1alpha1.ValueType{Value: key},
@@ -118,20 +118,20 @@ func HTTPClientTLSFromString(ca, cert, key string) HTTPOutputOption {
 	}
 }
 
-func HTTPClientTLS(tls telemetryv1alpha1.TLSConfig) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
-		output.TLSConfig = tls
+func HTTPClientTLS(tls telemetryv1alpha1.LogPipelineOutputTLS) HTTPOutputOption {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
+		output.TLS = tls
 	}
 }
 
 func HTTPHost(host string) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
 		output.Host = telemetryv1alpha1.ValueType{Value: host}
 	}
 }
 
 func HTTPHostFromSecret(secretName, secretNamespace, key string) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
 		output.Host = telemetryv1alpha1.ValueType{ValueFrom: &telemetryv1alpha1.ValueFromSource{SecretKeyRef: &telemetryv1alpha1.SecretKeyRef{
 			Name: secretName,
 			Namespace: secretNamespace,
@@ -141,19 +141,19 @@ func HTTPHostFromSecret(secretName, secretNamespace, key string) HTTPOutputOptio
 	}
 
 func HTTPPort(port int32) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
 		output.Port = strconv.Itoa(int(port))
 	}
 }
 
 func HTTPDedot(dedot bool) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
 		output.Dedot = dedot
 	}
 }
 
 func HTTPBasicAuthFromSecret(secretName, secretNamespace, userKey, passwordKey string) HTTPOutputOption {
-	return func(output *telemetryv1alpha1.HTTPOutput) {
+	return func(output *telemetryv1alpha1.LogPipelineHTTPOutput) {
 		output.User = telemetryv1alpha1.ValueType{ValueFrom: &telemetryv1alpha1.ValueFromSource{SecretKeyRef: &telemetryv1alpha1.SecretKeyRef{
 			Name: secretName,
 			Namespace: secretNamespace,
diff --git a/test/e2e/logs_mtls_missing_values_test.go b/test/e2e/logs_mtls_missing_values_test.go
index 1decd1464..c77461367 100644
--- a/test/e2e/logs_mtls_missing_values_test.go
+++ b/test/e2e/logs_mtls_missing_values_test.go
@@ -50,7 +50,7 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs), Ordered, func() {
 			WithHTTPOutput(
 				testutils.HTTPHost(backend.Host()),
 				testutils.HTTPPort(backend.Port()),
-				testutils.HTTPClientTLS(telemetryv1alpha1.TLSConfig{
+				testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
 					Cert: &telemetryv1alpha1.ValueType{Value: clientCerts.ClientCertPem.String()},
 					Key: &telemetryv1alpha1.ValueType{Value: clientCerts.ClientKeyPem.String()},
 				}),
@@ -62,7 +62,7 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs), Ordered, func() {
 			WithHTTPOutput(
 				testutils.HTTPHost(backend.Host()),
 				testutils.HTTPPort(backend.Port()),
-				testutils.HTTPClientTLS(telemetryv1alpha1.TLSConfig{
+				testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
 					CA: &telemetryv1alpha1.ValueType{Value: clientCerts.CaCertPem.String()},
 					Key: &telemetryv1alpha1.ValueType{Value: clientCerts.ClientKeyPem.String()},
 				}),
@@ -74,7 +74,7 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs), Ordered, func() {
 			WithHTTPOutput(
 				testutils.HTTPHost(backend.Host()),
 				testutils.HTTPPort(backend.Port()),
-				testutils.HTTPClientTLS(telemetryv1alpha1.TLSConfig{
+				testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
 					CA: &telemetryv1alpha1.ValueType{Value: clientCerts.CaCertPem.String()},
 					Cert: &telemetryv1alpha1.ValueType{Value: clientCerts.ClientCertPem.String()},
 				}),
@@ -86,7 +86,7 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs), Ordered, func() {
 			WithHTTPOutput(
 				testutils.HTTPHost(backend.Host()),
 				testutils.HTTPPort(backend.Port()),
-				testutils.HTTPClientTLS(telemetryv1alpha1.TLSConfig{
+				testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
 					Disabled: true,
 					SkipCertificateValidation: true,
 				}),
@@ -98,7 +98,7 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs), Ordered, func() {
 			WithHTTPOutput(
 				testutils.HTTPHost(backend.Host()),
 				testutils.HTTPPort(backend.Port()),
-				testutils.HTTPClientTLS(telemetryv1alpha1.TLSConfig{
+				testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
 					CA: &telemetryv1alpha1.ValueType{Value: clientCerts.CaCertPem.String()},
 				}),
 			).
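Example (illustrative sketch, not part of the patch): after the rename, the testutils HTTP output options operate on *telemetryv1alpha1.LogPipelineHTTPOutput, and client TLS settings are passed as a telemetryv1alpha1.LogPipelineOutputTLS value. A minimal sketch of how the options shown above compose when applied directly; the host, port, and TLS values are placeholders:

	// Each HTTPOutputOption is a func(*telemetryv1alpha1.LogPipelineHTTPOutput)
	// that mutates the output in place (see internal/testutils/pipeline_opts.go above).
	output := &telemetryv1alpha1.LogPipelineHTTPOutput{}
	opts := []testutils.HTTPOutputOption{
		testutils.HTTPHost("localhost"),
		testutils.HTTPPort(4318),
		testutils.HTTPClientTLS(telemetryv1alpha1.LogPipelineOutputTLS{
			Disabled:                  true,
			SkipCertificateValidation: true,
		}),
	}
	for _, opt := range opts {
		opt(output) // applies the option to the renamed output type
	}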
diff --git a/test/e2e/logs_version_conversion_test.go b/test/e2e/logs_version_conversion_test.go
index 927974a92..0396eb587 100644
--- a/test/e2e/logs_version_conversion_test.go
+++ b/test/e2e/logs_version_conversion_test.go
@@ -28,14 +28,14 @@ var _ = Describe(suite.ID(), Label(suite.LabelLogs, suite.LabelExperimental), Or
 			Name: v1Alpha1PipelineName,
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Output: telemetryv1alpha1.Output{
-				HTTP: &telemetryv1alpha1.HTTPOutput{
+			Output: telemetryv1alpha1.LogPipelineOutput{
+				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 					Host: telemetryv1alpha1.ValueType{
 						Value: "localhost",
 					},
 					Port: "443",
 					URI: "/",
-					TLSConfig: telemetryv1alpha1.TLSConfig{
+					TLS: telemetryv1alpha1.LogPipelineOutputTLS{
 						Disabled: true,
 					},
 				},
diff --git a/webhook/logpipeline/validation/files_validator_test.go b/webhook/logpipeline/validation/files_validator_test.go
index 3f0dd3e73..4596e6d3e 100644
--- a/webhook/logpipeline/validation/files_validator_test.go
+++ b/webhook/logpipeline/validation/files_validator_test.go
@@ -15,7 +15,7 @@ func TestDuplicateFileName(t *testing.T) {
 			Name: "foo",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{{
 				Name: "f1.json",
 				Content: "",
 			},
@@ -31,7 +31,7 @@ func TestDuplicateFileName(t *testing.T) {
 			Name: "bar",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{{
 				Name: "f1.json",
 				Content: "",
 			},
@@ -50,7 +50,7 @@ func TestDuplicateFileNameInSamePipeline(t *testing.T) {
 			Name: "foo",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{{
 				Name: "f1.json",
 				Content: "",
 			}, {
@@ -75,7 +75,7 @@ func TestValidateUpdatePipeline(t *testing.T) {
 			Name: "foo",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{{
 				Name: "f1.json",
 				Content: "",
 			},
@@ -91,7 +91,7 @@ func TestValidateUpdatePipeline(t *testing.T) {
 			Name: "foo",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Files: []telemetryv1alpha1.FileMount{{
+			Files: []telemetryv1alpha1.LogPipelineFileMount{{
 				Name: "f1.json",
 				Content: "",
 			},
diff --git a/webhook/logpipeline/validation/variable_validator.go b/webhook/logpipeline/validation/variable_validator.go
index 33e2a3c0c..7c545bc28 100644
--- a/webhook/logpipeline/validation/variable_validator.go
+++ b/webhook/logpipeline/validation/variable_validator.go
@@ -47,12 +47,12 @@ func (v *variablesValidator) Validate(logPipeline *telemetryv1alpha1.LogPipeline
 	return nil
 }
 
-func validateMandatoryFieldsAreEmpty(vr telemetryv1alpha1.VariableRef) bool {
+func validateMandatoryFieldsAreEmpty(vr telemetryv1alpha1.LogPipelineVariableRef) bool {
 	secretKey := vr.ValueFrom.SecretKeyRef
 	return len(vr.Name) == 0 || len(secretKey.Key) == 0 || len(secretKey.Namespace) == 0 || len(secretKey.Name) == 0
 }
 
-func findConflictingVariables(logPipeLine *telemetryv1alpha1.LogPipeline, vr telemetryv1alpha1.VariableRef, existingPipelineName string) error {
+func findConflictingVariables(logPipeLine *telemetryv1alpha1.LogPipeline, vr telemetryv1alpha1.LogPipelineVariableRef, existingPipelineName string) error {
 	for _, v := range logPipeLine.Spec.Variables {
 		if v.Name == vr.Name {
 			return fmt.Errorf("variable name must be globally unique: variable '%s' is used in pipeline '%s'", v.Name, existingPipelineName)
diff --git a/webhook/logpipeline/validation/variable_validator_test.go b/webhook/logpipeline/validation/variable_validator_test.go
index e3f95ca40..dde54a26f 100644
--- a/webhook/logpipeline/validation/variable_validator_test.go
+++ b/webhook/logpipeline/validation/variable_validator_test.go
@@ -13,7 +13,7 @@ import (
 func TestValidateSecretKeyRefs(t *testing.T) {
 	logPipeline := &telemetryv1alpha1.LogPipeline{
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Variables: []telemetryv1alpha1.VariableRef{
+			Variables: []telemetryv1alpha1.LogPipelineVariableRef{
 				{
 					Name: "foo1",
 					ValueFrom: telemetryv1alpha1.ValueFromSource{
@@ -46,7 +46,7 @@ func TestValidateSecretKeyRefs(t *testing.T) {
 			Name: "pipe2",
 		},
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Variables: []telemetryv1alpha1.VariableRef{{
+			Variables: []telemetryv1alpha1.LogPipelineVariableRef{{
 				Name: "foo2",
 				ValueFrom: telemetryv1alpha1.ValueFromSource{
 					SecretKeyRef: &telemetryv1alpha1.SecretKeyRef{
@@ -67,7 +67,7 @@ func TestValidateSecretKeyRefs(t *testing.T) {
 func TestVariableValidator(t *testing.T) {
 	logPipeline := &telemetryv1alpha1.LogPipeline{
 		Spec: telemetryv1alpha1.LogPipelineSpec{
-			Variables: []telemetryv1alpha1.VariableRef{
+			Variables: []telemetryv1alpha1.LogPipelineVariableRef{
 				{
 					Name: "foo1",
 					ValueFrom: telemetryv1alpha1.ValueFromSource{
diff --git a/webhook/logpipeline/webhook_test.go b/webhook/logpipeline/webhook_test.go
index ede829578..353d309cf 100644
--- a/webhook/logpipeline/webhook_test.go
+++ b/webhook/logpipeline/webhook_test.go
@@ -108,8 +108,8 @@ func TestHandle(t *testing.T) {
 	t.Run("should validate OTLP input based on output", func(t *testing.T) {
 		type args struct {
 			name string
-			output *telemetryv1alpha1.Output
-			input *telemetryv1alpha1.Input
+			output *telemetryv1alpha1.LogPipelineOutput
+			input *telemetryv1alpha1.LogPipelineInput
 			allowed bool
 			message string
 		}
@@ -117,7 +117,7 @@ func TestHandle(t *testing.T) {
 		tests := []args{
 			{
 				name: "otlp-input-and-output",
-				output: &telemetryv1alpha1.Output{
+				output: &telemetryv1alpha1.LogPipelineOutput{
 					Custom: "",
 					HTTP: nil,
 					OTLP: &telemetryv1alpha1.OTLPOutput{
@@ -128,27 +128,27 @@ func TestHandle(t *testing.T) {
 					},
 				},
 				},
-				input: &telemetryv1alpha1.Input{
+				input: &telemetryv1alpha1.LogPipelineInput{
 					OTLP: &telemetryv1alpha1.OTLPInput{},
 				},
 				allowed: true,
 			},
 			{
 				name: "otlp-input-and-fluentbit-output",
-				output: &telemetryv1alpha1.Output{
+				output: &telemetryv1alpha1.LogPipelineOutput{
 					Custom: "",
-					HTTP: &telemetryv1alpha1.HTTPOutput{
+					HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
 						Host: telemetryv1alpha1.ValueType{Value: "127.0.0.1"},
 						Port: "8080",
 						URI: "/",
 						Format: "json",
-						TLSConfig: telemetryv1alpha1.TLSConfig{
+						TLS: telemetryv1alpha1.LogPipelineOutputTLS{
 							Disabled: true,
 							SkipCertificateValidation: true,
 						},
 					},
-				input: &telemetryv1alpha1.Input{
+				input: &telemetryv1alpha1.LogPipelineInput{
 					OTLP: &telemetryv1alpha1.OTLPInput{},
 				},
 				allowed: false,
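Example (illustrative sketch, not part of the patch): the renamed v1alpha1 types touched by this change, shown together in a single LogPipeline literal. The snippet assumes the telemetryv1alpha1 import alias used throughout the files above; the concrete field values are placeholders:

	// A LogPipeline built entirely from the LogPipeline-prefixed type names
	// introduced by this diff (input, files, filters, variables, output, TLS).
	pipeline := telemetryv1alpha1.LogPipeline{
		Spec: telemetryv1alpha1.LogPipelineSpec{
			Input: telemetryv1alpha1.LogPipelineInput{
				Application: &telemetryv1alpha1.LogPipelineApplicationInput{
					Containers: telemetryv1alpha1.LogPipelineContainerSelector{Include: []string{"app"}},
				},
			},
			Files:   []telemetryv1alpha1.LogPipelineFileMount{{Name: "script.lua", Content: "-- lua"}},
			Filters: []telemetryv1alpha1.LogPipelineFilter{{Custom: "name grep"}},
			Variables: []telemetryv1alpha1.LogPipelineVariableRef{{
				Name: "ENV_VAR",
				ValueFrom: telemetryv1alpha1.ValueFromSource{
					SecretKeyRef: &telemetryv1alpha1.SecretKeyRef{Name: "my-secret", Namespace: "default", Key: "token"},
				},
			}},
			Output: telemetryv1alpha1.LogPipelineOutput{
				HTTP: &telemetryv1alpha1.LogPipelineHTTPOutput{
					Host: telemetryv1alpha1.ValueType{Value: "localhost"},
					Port: "443",
					URI:  "/",
					TLS:  telemetryv1alpha1.LogPipelineOutputTLS{Disabled: true},
				},
			},
		},
	}
	_ = pipeline // placeholder use; in tests this would typically be created via the client or builder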