processors

package
v1.15.0 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 8, 2024 License: MIT Imports: 21 Imported by: 0

Documentation

Index

Constants

View Source
const AGGREGATE_PROCESSOR_NODE_NAME = "aggregate-v2"
View Source
const AGGREGATE_PROCESSOR_TYPE_NAME = "aggregate_v2"
View Source
const COMPACT_FIELDS_PROCESSOR_NODE_NAME = "compact-fields"
View Source
const COMPACT_FIELDS_PROCESSOR_TYPE_NAME = "compact_fields"
View Source
const DECRYPT_FIELDS_PROCESSOR_NODE_NAME = "decrypt-fields"
View Source
const DECRYPT_FIELDS_PROCESSOR_TYPE_NAME = "decrypt_fields"
View Source
const DEDUPE_PROCESSOR_NODE_NAME = "dedupe"
View Source
const DEDUPE_PROCESSOR_TYPE_NAME = DEDUPE_PROCESSOR_NODE_NAME
View Source
const DROP_FIELDS_PROCESSOR_NODE_NAME = "drop-fields"
View Source
const DROP_FIELDS_PROCESSOR_TYPE_NAME = "drop_fields"
View Source
const ENCRYPT_FIELDS_PROCESSOR_NODE_NAME = "encrypt-fields"
View Source
const ENCRYPT_FIELDS_PROCESSOR_TYPE_NAME = "encrypt_fields"
View Source
const EVENT_TO_METRIC_PROCESSOR_NODE_NAME = "event-to-metric"
View Source
const EVENT_TO_METRIC_PROCESSOR_TYPE_NAME = "event_to_metric"
View Source
const FILTER_PROCESSOR_NODE_NAME = "filter"
View Source
const FILTER_PROCESSOR_TYPE_NAME = "filter"
View Source
const FLATTEN_FIELDS_PROCESSOR_NODE_NAME = "flatten-fields"
View Source
const FLATTEN_FIELDS_PROCESSOR_TYPE_NAME = "flatten_fields"
View Source
const MAP_FIELDS_PROCESSOR_NODE_NAME = "map-fields"
View Source
const MAP_FIELDS_PROCESSOR_TYPE_NAME = "map_fields"
View Source
const METRICS_TAG_CARDINALITY_LIMIT_PROCESSOR_TYPE_NAME = "metrics_tag_cardinality_limit"
View Source
const METRICS_TAG_LIMIT_PROCESSOR_NODE_NAME = "metrics-tag-cardinality-limit"
View Source
const PARSE_PROCESSOR_NODE_NAME = "parse"
View Source
const PARSE_PROCESSOR_TYPE_NAME = PARSE_PROCESSOR_NODE_NAME
View Source
const PARSE_SEQUENTIALLY_PROCESSOR_NODE_NAME = "parse-sequentially"
View Source
const PARSE_SEQUENTIALLY_PROCESSOR_TYPE_NAME = "parse_sequentially"
View Source
const REDUCE_PROCESSOR_NODE_NAME = "reduce"
View Source
const REDUCE_PROCESSOR_TYPE_NAME = REDUCE_PROCESSOR_NODE_NAME
View Source
const ROUTE_PROCESSOR_NODE_NAME = ROUTE_PROCESSOR_TYPE_NAME
View Source
const ROUTE_PROCESSOR_TYPE_NAME = "route"
View Source
const SAMPLE_PROCESSOR_NODE_NAME = "sample"
View Source
const SAMPLE_PROCESSOR_TYPE_NAME = SAMPLE_PROCESSOR_NODE_NAME
View Source
const SCRIPT_EXECUTION_PROCESSOR_NODE_NAME = "js-script"
View Source
const SCRIPT_EXECUTION_PROCESSOR_TYPE_NAME = "script_execution"
View Source
const STRINGIFY_PROCESSOR_NODE_NAME = "stringify"
View Source
const STRINGIFY_PROCESSOR_TYPE_NAME = STRINGIFY_PROCESSOR_NODE_NAME
View Source
const UNROLL_PROCESSOR_NODE_NAME = "unroll"
View Source
const UNROLL_PROCESSOR_TYPE_NAME = UNROLL_PROCESSOR_NODE_NAME

Variables

View Source
var AggregateV2ProcessorResourceSchema = schema.Schema{
	Description: "Aggregates multiple metric events into a single metric event using either a tumbling interval window or a sliding interval window",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"window_type": schema.StringAttribute{
			Required:    true,
			Description: "The type of window to use when aggregating events (tumbling or sliding)",
			Validators:  []validator.String{stringvalidator.OneOf("tumbling", "sliding")},
		},
		"interval": schema.Int64Attribute{
			Required:    true,
			Description: "The interval over which events are aggregated in seconds",
		},
		"operation": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The operation in which to perform the aggregation",
			Validators:  []validator.String{stringvalidator.OneOf(MapKeys(OPERATIONS)...)},
		},
		"script": schema.StringAttribute{
			Optional: true,
			Computed: false,
		},
		"window_min": schema.Int64Attribute{
			Optional:    true,
			Computed:    true,
			Description: "",
		},
		"conditional": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "When method is set to sliding: " + ParentConditionalAttribute.Description,
			Attributes:  ParentConditionalAttribute.Attributes,
		},
		"group_by": schema.ListAttribute{
			ElementType: basetypes.StringType{},
			Optional:    true,
			Computed:    true,
			Description: "Group events based on matching data from each of these field paths. Supports nesting via dot-notation.",
		},
		"event_timestamp": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The field path from which to read the event timestamp when aggregating events — TODO confirm against implementation.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(200),
			},
		},
	}),
}
View Source
var CompactFieldsProcessorResourceSchema = schema.Schema{
	Description: "Remove empty values from a list of fields",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"fields": schema.ListAttribute{
			ElementType: StringType,
			Required:    true,
			Description: "A list of fields to remove empty values from",
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
		"compact_array": schema.BoolAttribute{
			Optional:    true,
			Description: "Remove empty arrays from a field",
			Computed:    true,
			Default:     booldefault.StaticBool(true),
		},
		"compact_object": schema.BoolAttribute{
			Optional:    true,
			Description: "Remove empty objects from a field",
			Computed:    true,
			Default:     booldefault.StaticBool(true),
		},
	}),
}
View Source
var DecryptFieldsProcessorResourceSchema = schema.Schema{
	Description: "Decrypts the value of the provided field",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"field": schema.StringAttribute{
			Required:    true,
			Description: "Field to decrypt. The value of the field must be a string",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"algorithm": schema.StringAttribute{
			Required:    true,
			Description: "The algorithm with which the data was encrypted",
			Validators: []validator.String{
				stringvalidator.OneOf(EncryptionAlgorithms...),
			},
		},
		"key": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "The key/secret used to encrypt the value",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(16),
				stringvalidator.LengthAtMost(32),
			},
		},
		"iv_field": schema.StringAttribute{
			Required:    true,
			Description: "The field from which to read the initialization vector, IV",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"decode_raw_bytes": schema.BoolAttribute{
			Optional:    true,
			Computed:    true,
			Default:     booldefault.StaticBool(true),
			Description: "Decode the encrypted value and initialization vector from Base64 text",
		},
	}),
}
View Source
var DedupeProcessorResourceSchema = schema.Schema{
	Description: "Remove duplicates from the data stream",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"fields": schema.ListAttribute{
			ElementType: StringType,
			Required:    true,
			Description: "A list of fields on which to base deduping",
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
		"number_of_events": schema.Int64Attribute{
			Computed:    true,
			Optional:    true,
			Description: "Number of events to compare across",
			Validators: []validator.Int64{
				int64validator.AtLeast(2),
				int64validator.AtMost(5000),
			},
			Default: int64default.StaticInt64(5000),
		},
		"comparison_type": schema.StringAttribute{
			Computed: true,
			Optional: true,
			Description: "When set to \"Match\" (default), it only compares across the fields which are" +
				" specified by the user. When set to \"Ignore\", it compares everything but the fields" +
				" specified by the user",
			Default: stringdefault.StaticString("Match"),
			Validators: []validator.String{
				stringvalidator.OneOf("Ignore", "Match"),
			},
		},
	}),
}
View Source
var DropFieldsProcessorResourceSchema = schema.Schema{
	Description: "Remove fields from the events",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"fields": schema.ListAttribute{
			ElementType: StringType,
			Required:    true,
			Description: "A list of fields to be removed",
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
	}),
}
View Source
var EncryptFieldsProcessorResourceSchema = schema.Schema{
	Description: "Encrypts the value of the provided field",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"field": schema.StringAttribute{
			Required:    true,
			Description: "Field to encrypt. The value of the field must be a primitive (string, number, boolean).",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"algorithm": schema.StringAttribute{
			Required:    true,
			Description: "The encryption algorithm to use on the field",
			Validators: []validator.String{
				stringvalidator.OneOf(EncryptionAlgorithms...),
			},
		},
		"key": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "The encryption key",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(16),
				stringvalidator.LengthAtMost(32),
			},
		},
		"iv_field": schema.StringAttribute{
			Required: true,
			Description: "The field in which to store the generated initialization " +
				"vector, IV. Each encrypted value will have a unique IV.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"encode_raw_bytes": schema.BoolAttribute{
			Optional: true,
			Computed: true,
			Default:  booldefault.StaticBool(true),
			Description: "Encode the encrypted value and generated initialization " +
				"vector as Base64 text",
		},
	}),
}
View Source
var EventToMetricProcessorResourceSchema = schema.Schema{
	Description: "Allows conversion between arbitrary events and a Metric",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"metric_name": schema.StringAttribute{
			Required:    true,
			Description: "The machine name of the metric to emit",
			Validators: []validator.String{
				stringvalidator.RegexMatches(METRIC_NAME_REGEX, "has invalid characters; See documentation"),
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(128),
			},
		},
		"metric_kind": schema.StringAttribute{
			Required: true,
			Description: "The kind of metric to emit, Absolute or Incremental. Absolute metrics represent " +
				"a complete value, and will generally replace an existing value for the metric in " +
				"the target destination. Incremental metrics represent an additive value which " +
				"is aggregated in the target destination to produce a new value.",
			Validators: []validator.String{
				stringvalidator.OneOf(MetricKind...),
			},
		},
		"metric_type": schema.StringAttribute{
			Required:    true,
			Description: "The type of metric to emit. For example, counter, sum, gauge.",
			Validators: []validator.String{
				stringvalidator.OneOf(MetricType...),
			},
		},
		"value_field": schema.StringAttribute{
			Optional:    true,
			Description: "The value of the metric should come from this event field path.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.ExactlyOneOf(path.MatchRelative().AtParent().AtName("value_number")),
			},
		},
		"value_number": schema.Float64Attribute{
			Optional:    true,
			Description: "Use this specified numeric value.",
		},
		"namespace_field": schema.StringAttribute{
			Optional:    true,
			Description: "The value of the namespace should come from this event field path.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("namespace_value")),
			},
		},
		"namespace_value": schema.StringAttribute{
			Optional:    true,
			Description: "The namespace value should be this specified string.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"tags": schema.ListNestedAttribute{
			Optional:    true,
			Description: "A set of tags (also called labels) to apply to the metric event.",
			Validators: []validator.List{
				listvalidator.SizeAtMost(10),
			},
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"name": schema.StringAttribute{
						Required:    true,
						Description: "The tag name",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(128),
							stringvalidator.RegexMatches(METRIC_TAG_NAME_REGEX, "has invalid characters; See documentation"),
						},
					},
					"value_type": schema.StringAttribute{
						Required:    true,
						Description: "Specifies if the value comes from an event field, or a new value input.",
						Validators: []validator.String{
							stringvalidator.OneOf("field", "value"),
						},
					},
					"value": schema.StringAttribute{
						Required: true,
						Description: "For value types, this is the value of the tag. If using a field type, the " +
							"value comes from this field path. Note that fields with highly-variable values will result " +
							"in high-cardinality metrics, which may impact storage or cost in downstream destinations.",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
						},
					},
				},
			},
		},
	}),
}
View Source
var FilterProcessorResourceSchema = schema.Schema{
	Description: "Define condition(s) to include or exclude events from the pipeline",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"action": schema.StringAttribute{
			Description: "How to handle events matching this criteria",
			Required:    true,
			Validators: []validator.String{
				stringvalidator.OneOf("allow", "drop"),
			},
		},
		"conditional": schema.SingleNestedAttribute{
			Required:    true,
			Description: ParentConditionalAttribute.Description,
			Attributes:  ParentConditionalAttribute.Attributes,
		},
	}),
}
View Source
var FlattenFieldsProcessorResourceSchema = schema.Schema{
	Description: "Flattens the object or array value of a field into a single-level representation.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"fields": schema.ListAttribute{
			ElementType: StringType,
			Optional:    true,
			Description: "A list of nested fields containing a value to flatten. When empty or omitted, the entire event will be flattened.",
			Validators: []validator.List{
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
		"delimiter": schema.StringAttribute{
			Computed:    true,
			Optional:    true,
			Description: "The separator to use between flattened field names",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(1),
			},
			Default: stringdefault.StaticString("_"),
		},
	}),
}
View Source
var METRIC_NAME_REGEX = regexp.MustCompile("^[a-zA-Z][a-zA-Z0-9_:]*$")
View Source
var METRIC_TAG_NAME_REGEX = regexp.MustCompile("^[a-zA-Z][a-zA-Z0-9_]*$")
View Source
var MapFieldsProcessorResourceSchema = schema.Schema{
	Description: "Maps data from one field to another, either by moving or copying",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"mappings": schema.ListNestedAttribute{
			Required:    true,
			Description: "A list of field mappings. Mappings are applied in the order they are defined",
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
			},
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"source_field": schema.StringAttribute{
						Required:    true,
						Description: "The field to copy data from",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
						},
					},
					"target_field": schema.StringAttribute{
						Required:    true,
						Description: "The field to copy data into",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
						},
					},
					"drop_source": schema.BoolAttribute{
						Optional: true,
						Computed: true,
						Default:  booldefault.StaticBool(false),
						Description: "When enabled, the source field is dropped after the data is copied " +
							"to the target field. Otherwise, it is preserved.",
					},
					"overwrite_target": schema.BoolAttribute{
						Optional: true,
						Computed: true,
						Default:  booldefault.StaticBool(false),
						Description: "When enabled, any existing data in the target field is overwritten. " +
							"Otherwise, the target field will be preserved and this mapping will " +
							"have no effect.",
					},
				},
			},
		},
	}),
}
View Source
var MetricsTagCardinalityLimitProcessorResourceSchema = schema.Schema{
	Description: "Limits the cardinality of metric events by either dropping events " +
		"or tags that exceed a specified value limit",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"tags": schema.ListAttribute{
			ElementType: StringType{},
			Optional:    true,
			Description: "A list of tags to apply cardinality limits. If none are provided, " +
				"all tags will be considered.",
			Validators: []validator.List{
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
				listvalidator.ValueStringsAre(stringvalidator.LengthAtMost(100)),
				listvalidator.SizeAtMost(10),
			},
		},
		"exclude_tags": schema.ListAttribute{
			ElementType: StringType{},
			Optional:    true,
			Description: "A list of tags to explicitly exclude from cardinality limits",
			Validators: []validator.List{
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
				listvalidator.ValueStringsAre(stringvalidator.LengthAtMost(100)),
				listvalidator.SizeAtMost(10),
			},
		},
		"action": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("drop_event"),
			Description: "The action to take when a tag's cardinality exceeds the value limit",
			Validators: []validator.String{
				stringvalidator.OneOf(LimitExceedAction...),
			},
		},
		"value_limit": schema.Int64Attribute{
			Required:    true,
			Description: "Maximum number of unique values for tags",
		},
		"mode": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("exact"),
			Description: "The method used to reduce tag value cardinality",
			Validators: []validator.String{
				stringvalidator.OneOf(TagCardinalityMode...),
			},
		},
	}),
}
View Source
var OPERATIONS = map[string]string{
	"sum":                        "SUM",
	"average":                    "AVG",
	"set_intersection":           "SET_INTERSECTION",
	"distribution_concatenation": "DIST_CONCAT",
}
View Source
var ParseProcessorResourceSchema = schema.Schema{
	Description: "Parse a specified field using the chosen parser",
	Attributes:  ExtendBaseAttributes(parse_schema),
}
View Source
var ParseSequentiallyProcessorResourceSchema = schema.Schema{
	Description: "Parse a field using one of a list of ordered parsers. Parsing ends (short-circuits) on the first successful parse.",
	Attributes:  ExtendBaseAttributes(parse_sequential_schema),
}
View Source
var ReduceProcessorResourceSchema = schema.Schema{
	Description: "Combine multiple events over time into one based on a set of criteria",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"duration_ms": schema.Int64Attribute{
			Optional: true,
			Description: "The amount of time (in milliseconds) to allow streaming events to accumulate " +
				"into a single \"reduced\" event. The process repeats indefinitely, or until " +
				"an \"ends when\" condition is satisfied.",
			Computed: true,
			Default:  int64default.StaticInt64(30000),
		},
		"max_events": schema.Int64Attribute{
			Optional: true,
			Description: "The maximum number of events that can be included in a time window (specified " +
				"by duration_ms). The reduce operation will stop once it has reached this " +
				"number of events, regardless of whether the duration_ms have elapsed.",
		},
		"group_by": schema.ListAttribute{
			ElementType: StringType{},
			Optional:    true,
			Description: "Before reducing, group events based on matching data from each of these " +
				"field paths. Supports nesting via dot-notation.",
		},
		"date_formats": schema.ListNestedAttribute{
			Optional: true,
			Description: "Describes which root-level properties are dates, and their expected format. " +
				"Dot-notation is supported, but nested field lookup paths will be an error.",
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"field": schema.StringAttribute{
						Required:    true,
						Description: "Specifies a root-level path property that contains a date value.",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(200),
						},
					},
					"format": schema.StringAttribute{
						Required:    true,
						Description: "The template describing the date format",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(200),
						},
					},
				},
			},
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
			},
		},
		"merge_strategies": schema.ListNestedAttribute{
			Optional: true,
			Description: "Specify merge strategies for individual root-level properties. " +
				"Dot-notation is supported, but nested field lookup paths will be an error.",
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"field": schema.StringAttribute{
						Required:    true,
						Description: "This is a root-level path property to apply a merge strategy to its value",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(200),
						},
					},
					"strategy": schema.StringAttribute{
						Required:    true,
						Description: "The merge strategy to be used for the specified property",
						Validators: []validator.String{
							stringvalidator.OneOf(ReduceMergeStrategies...),
						},
					},
				},
			},
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
			},
		},
		"flush_condition": schema.SingleNestedAttribute{
			Optional: true,
			Description: "Force accumulated event reduction to flush the result when a " +
				"conditional expression evaluates to true on an inbound event.",
			Attributes: map[string]schema.Attribute{
				"when": schema.StringAttribute{
					Required: true,
					Description: "Specifies whether to start a new reduction of events based on the " +
						"conditions, or end a current reduction based on them.",
					Validators: []validator.String{
						stringvalidator.OneOf("starts_when", "ends_when"),
					},
				},
				"conditional": ParentConditionalAttribute,
			},
		},
	}),
}
View Source
var RouteProcessorResourceSchema = schema.Schema{
	Description: "Route data based on whether or not it matches logical comparisons.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"conditionals": schema.ListNestedAttribute{
			Required:    true,
			Description: "A list of conditions, each of which has a label and an expression or expression groups.",
			NestedObject: schema.NestedAttributeObject{
				Attributes: ExtendSchemaAttributes(ParentConditionalAttribute.Attributes, map[string]schema.Attribute{
					"label": schema.StringAttribute{
						Required:    true,
						Description: "A label for the expression group",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(20),
						},
					},
					"output_name": schema.StringAttribute{
						Computed: true,
						Description: "A system generated value to identify the results of this expression. " +
							"This value should be used when connecting the results to another processor or destination.",
					},
				}),
			},
		},
		"unmatched": schema.StringAttribute{
			Computed:    true,
			Description: "A system generated value to identify the results that don't match any condition.",
		},
	}),
}
View Source
var SampleProcessorResourceSchema = schema.Schema{
	Description: "Sample data at a given rate, retaining only a subset of data events for further processing",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"rate": schema.Int64Attribute{
			Computed: true,
			Optional: true,
			Description: "The rate at which events will be forwarded, expressed as 1/N. For example," +
				" `rate = 10` means 1 out of every 10 events will be forwarded and the rest" +
				" will be dropped",
			Validators: []validator.Int64{
				int64validator.AtLeast(2),
				int64validator.AtMost(10000),
			},
			Default: int64default.StaticInt64(10),
		},
		"always_include": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Events matching this criteria will always show up in the results",
			Attributes: map[string]schema.Attribute{
				"field": schema.StringAttribute{
					Required:    true,
					Description: "The field to use in a condition to always include in sampling",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"operator": schema.StringAttribute{
					Required: true,
					Description: "The comparison operator to check the value of the field or" +
						" whether the field exists",
					Validators: []validator.String{
						stringvalidator.OneOf(Operators...),
					},
				},
				"value_string": schema.StringAttribute{
					Optional:    true,
					Description: "The operand to compare the field value with, when the value is a string",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"value_number": schema.Float64Attribute{
					Optional:    true,
					Description: "The operand to compare the field value with, when the value is a number",
				},
				"case_sensitive": schema.BoolAttribute{
					Optional: true,
					Computed: true,

					Description: "Perform case sensitive comparison?",
					Validators: []validator.Bool{
						boolvalidator.AlsoRequires(
							path.MatchRelative().AtParent().AtName("operator"),
							path.MatchRelative().AtParent().AtName("value_string"),
						),
					},
				},
			},
		},
	}),
}
View Source
var ScriptExecutionProcessorResourceSchema = schema.Schema{
	Description: "Use JavaScript to reshape and transform your data." +
		" You can combine multiple actions like filtering, dropping," +
		" mapping, and casting inside of a single js script",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"script": schema.StringAttribute{
			Required:   true,
			Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
			Description: "The script containing the JavaScript function that represents the " +
			"transformation of events flowing through the pipeline",
		},
	}),
}
View Source
var StringifyProcessorResourceSchema = schema.Schema{
	Description: "Represents a processor to stringify JSON data.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}),
}
View Source
var UnrollProcessorResourceSchema = schema.Schema{
	Description: "Takes an array of events and emits them all as individual events",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"field": schema.StringAttribute{
			Required:    true,
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
			Description: "The field name that contains an array of events",
		},
		"values_only": schema.BoolAttribute{
			Optional: true,
			Computed: true,
			Default:  booldefault.StaticBool(true),
			Description: "When enabled, the values from the specified array field will be emitted as " +
				"new events. Otherwise, the original event will be duplicated for each value " +
				"in the array field, with the unrolled value present in the field specified.",
		},
	}),
}

Functions

func AggregateV2ProcessorFromModel added in v1.11.0

func AggregateV2ProcessorFromModel(plan *AggregateV2ProcessorModel, previousState *AggregateV2ProcessorModel) (*Processor, diag.Diagnostics)

func AggregateV2ProcessorToModel added in v1.11.0

func AggregateV2ProcessorToModel(plan *AggregateV2ProcessorModel, component *Processor)

func CompactFieldsProcessorFromModel

func CompactFieldsProcessorFromModel(plan *CompactFieldsProcessorModel, previousState *CompactFieldsProcessorModel) (*Processor, diag.Diagnostics)

func CompactFieldsProcessorToModel

func CompactFieldsProcessorToModel(plan *CompactFieldsProcessorModel, component *Processor)

func DecryptFieldsProcessorFromModel

func DecryptFieldsProcessorFromModel(plan *DecryptFieldsProcessorModel, previousState *DecryptFieldsProcessorModel) (*Processor, diag.Diagnostics)

func DecryptFieldsProcessorToModel

func DecryptFieldsProcessorToModel(plan *DecryptFieldsProcessorModel, component *Processor)

func DedupeProcessorFromModel

func DedupeProcessorFromModel(plan *DedupeProcessorModel, previousState *DedupeProcessorModel) (*Processor, diag.Diagnostics)

func DedupeProcessorToModel

func DedupeProcessorToModel(plan *DedupeProcessorModel, component *Processor)

func DropFieldsProcessorFromModel

func DropFieldsProcessorFromModel(plan *DropFieldsProcessorModel, previousState *DropFieldsProcessorModel) (*Processor, diag.Diagnostics)

func DropFieldsProcessorToModel

func DropFieldsProcessorToModel(plan *DropFieldsProcessorModel, component *Processor)

func EncryptFieldsProcessorFromModel

func EncryptFieldsProcessorFromModel(plan *EncryptFieldsProcessorModel, previousState *EncryptFieldsProcessorModel) (*Processor, diag.Diagnostics)

func EncryptFieldsProcessorToModel

func EncryptFieldsProcessorToModel(plan *EncryptFieldsProcessorModel, component *Processor)

func EventToMetricProcessorFromModel added in v1.0.2

func EventToMetricProcessorFromModel(plan *EventToMetricProcessorModel, previousState *EventToMetricProcessorModel) (*Processor, diag.Diagnostics)

func EventToMetricProcessorToModel added in v1.0.2

func EventToMetricProcessorToModel(plan *EventToMetricProcessorModel, component *Processor)

func FilterProcessorFromModel added in v1.0.3

func FilterProcessorFromModel(plan *FilterProcessorModel, previousState *FilterProcessorModel) (*Processor, diag.Diagnostics)

func FilterProcessorToModel added in v1.0.3

func FilterProcessorToModel(plan *FilterProcessorModel, component *Processor)

func FlattenFieldsProcessorFromModel

func FlattenFieldsProcessorFromModel(plan *FlattenFieldsProcessorModel, previousState *FlattenFieldsProcessorModel) (*Processor, diag.Diagnostics)

func FlattenFieldsProcessorToModel

func FlattenFieldsProcessorToModel(plan *FlattenFieldsProcessorModel, component *Processor)

func MapFieldsProcessorFromModel added in v1.0.1

func MapFieldsProcessorFromModel(plan *MapFieldsProcessorModel, previousState *MapFieldsProcessorModel) (*Processor, diag.Diagnostics)

func MapFieldsProcessorToModel added in v1.0.1

func MapFieldsProcessorToModel(plan *MapFieldsProcessorModel, component *Processor)

func MetricsTagCardinalityLimitProcessorFromModel added in v1.0.2

func MetricsTagCardinalityLimitProcessorFromModel(plan *MetricsTagCardinalityLimitProcessorModel, previousState *MetricsTagCardinalityLimitProcessorModel) (*Processor, diag.Diagnostics)

func MetricsTagCardinalityLimitProcessorToModel added in v1.0.2

func MetricsTagCardinalityLimitProcessorToModel(plan *MetricsTagCardinalityLimitProcessorModel, component *Processor)

func ParseProcessorFromModel

func ParseProcessorFromModel(plan *ParseProcessorModel, previousState *ParseProcessorModel) (*Processor, diag.Diagnostics)

func ParseProcessorToModel

func ParseProcessorToModel(plan *ParseProcessorModel, component *Processor)

func ParseSequentiallyProcessorFromModel

func ParseSequentiallyProcessorFromModel(plan *ParseSequentiallyProcessorModel, previousState *ParseSequentiallyProcessorModel) (*Processor, diag.Diagnostics)

func ParseSequentiallyProcessorToModel

func ParseSequentiallyProcessorToModel(plan *ParseSequentiallyProcessorModel, component *Processor)

func ReduceProcessorFromModel

func ReduceProcessorFromModel(plan *ReduceProcessorModel, previousState *ReduceProcessorModel) (*Processor, diag.Diagnostics)

func ReduceProcessorToModel

func ReduceProcessorToModel(plan *ReduceProcessorModel, component *Processor)

func RouteProcessorFromModel

func RouteProcessorFromModel(plan *RouteProcessorModel, previousState *RouteProcessorModel) (*Processor, diag.Diagnostics)

func RouteProcessorToModel

func RouteProcessorToModel(plan *RouteProcessorModel, component *Processor)

func SampleProcessorFromModel

func SampleProcessorFromModel(plan *SampleProcessorModel, previousState *SampleProcessorModel) (*Processor, diag.Diagnostics)

func SampleProcessorToModel

func SampleProcessorToModel(plan *SampleProcessorModel, component *Processor)

func ScriptExecutionProcessorFromModel

func ScriptExecutionProcessorFromModel(plan *ScriptExecutionProcessorModel, previousState *ScriptExecutionProcessorModel) (*Processor, diag.Diagnostics)

func ScriptExecutionProcessorToModel

func ScriptExecutionProcessorToModel(plan *ScriptExecutionProcessorModel, component *Processor)

func StringifyProcessorFromModel

func StringifyProcessorFromModel(plan *StringifyProcessorModel, previousState *StringifyProcessorModel) (*Processor, diag.Diagnostics)

func StringifyProcessorToModel

func StringifyProcessorToModel(plan *StringifyProcessorModel, component *Processor)

func StripUnknownOptions added in v1.0.4

func StripUnknownOptions(attr_type_keys []string, options map[string]any) map[string]any

func UnrollProcessorFromModel

func UnrollProcessorFromModel(plan *UnrollProcessorModel, previousState *UnrollProcessorModel) (*Processor, diag.Diagnostics)

func UnrollProcessorToModel

func UnrollProcessorToModel(plan *UnrollProcessorModel, component *Processor)

Types

type AggregateV2ProcessorModel added in v1.11.0

// AggregateV2ProcessorModel is the Terraform plan/state model for the
// aggregate-v2 processor. Each `tfsdk` tag binds a field to the schema
// attribute of the same name; fields tagged user_config:"true" appear to be
// the ones copied into the processor's user_config by the
// AggregateV2ProcessorFromModel/ToModel converters — confirm against those.
type AggregateV2ProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Interval     Int64  `tfsdk:"interval" user_config:"true"`
	// NOTE(review): Go name "Minimum" binds to attribute "window_min" — the
	// names deliberately differ; verify this is intentional.
	Minimum     Int64  `tfsdk:"window_min" user_config:"true"`
	Conditional Object `tfsdk:"conditional" user_config:"true"`
	// NOTE(review): uses the fully-qualified basetypes.ListValue where sibling
	// fields use the List alias — presumably the same underlying type; confirm.
	GroupBy        basetypes.ListValue `tfsdk:"group_by" user_config:"true"`
	Script         String              `tfsdk:"script" user_config:"true"`
	WindowType     String              `tfsdk:"window_type" user_config:"true"`
	Operation      String              `tfsdk:"operation" user_config:"true"`
	EventTimestamp String              `tfsdk:"event_timestamp" user_config:"true"`
}

type CompactFieldsProcessorModel

// CompactFieldsProcessorModel is the Terraform plan/state model for the
// compact-fields processor. `tfsdk` tags bind fields to schema attributes;
// user_config:"true" marks the attributes that belong to the processor's
// user-supplied configuration.
type CompactFieldsProcessorModel struct {
	Id            String `tfsdk:"id"`
	PipelineId    String `tfsdk:"pipeline_id"`
	Title         String `tfsdk:"title"`
	Description   String `tfsdk:"description"`
	Inputs        List   `tfsdk:"inputs"`
	GenerationId  Int64  `tfsdk:"generation_id"`
	Fields        List   `tfsdk:"fields" user_config:"true"`
	CompactArray  Bool   `tfsdk:"compact_array" user_config:"true"`
	CompactObject Bool   `tfsdk:"compact_object" user_config:"true"`
}

type DecryptFieldsProcessorModel

// DecryptFieldsProcessorModel is the Terraform plan/state model for the
// decrypt-fields processor. It mirrors EncryptFieldsProcessorModel, with
// DecodeRawBytes in place of EncodeRawBytes.
type DecryptFieldsProcessorModel struct {
	Id             String `tfsdk:"id"`
	PipelineId     String `tfsdk:"pipeline_id"`
	Title          String `tfsdk:"title"`
	Description    String `tfsdk:"description"`
	Inputs         List   `tfsdk:"inputs"`
	GenerationId   Int64  `tfsdk:"generation_id"`
	Field          String `tfsdk:"field" user_config:"true"`
	Algorithm      String `tfsdk:"algorithm" user_config:"true"`
	Key            String `tfsdk:"key" user_config:"true"`
	IvField        String `tfsdk:"iv_field" user_config:"true"`
	DecodeRawBytes Bool   `tfsdk:"decode_raw_bytes" user_config:"true"`
}

type DedupeProcessorModel

// DedupeProcessorModel is the Terraform plan/state model for the dedupe
// processor. user_config:"true" marks the user-configurable attributes.
type DedupeProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Fields       List   `tfsdk:"fields" user_config:"true"`
	// Number of recent events considered when checking for duplicates —
	// TODO confirm exact semantics against the API docs.
	NumberOfEvents Int64  `tfsdk:"number_of_events" user_config:"true"`
	ComparisonType String `tfsdk:"comparison_type" user_config:"true"`
}

type DropFieldsProcessorModel

// DropFieldsProcessorModel is the Terraform plan/state model for the
// drop-fields processor; Fields lists the event fields to remove.
type DropFieldsProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Fields       List   `tfsdk:"fields" user_config:"true"`
}

type EncryptFieldsProcessorModel

// EncryptFieldsProcessorModel is the Terraform plan/state model for the
// encrypt-fields processor. It mirrors DecryptFieldsProcessorModel, with
// EncodeRawBytes in place of DecodeRawBytes.
type EncryptFieldsProcessorModel struct {
	Id             String `tfsdk:"id"`
	PipelineId     String `tfsdk:"pipeline_id"`
	Title          String `tfsdk:"title"`
	Description    String `tfsdk:"description"`
	Inputs         List   `tfsdk:"inputs"`
	GenerationId   Int64  `tfsdk:"generation_id"`
	Field          String `tfsdk:"field" user_config:"true"`
	Algorithm      String `tfsdk:"algorithm" user_config:"true"`
	Key            String `tfsdk:"key" user_config:"true"`
	IvField        String `tfsdk:"iv_field" user_config:"true"`
	EncodeRawBytes Bool   `tfsdk:"encode_raw_bytes" user_config:"true"`
}

type EventToMetricProcessorModel added in v1.0.2

// EventToMetricProcessorModel is the Terraform plan/state model for the
// event-to-metric processor. Note this struct uses the explicit *Value type
// names (StringValue, ListValue, …) rather than the aliases used by older
// models in this package.
type EventToMetricProcessorModel struct {
	Id           StringValue `tfsdk:"id"`
	PipelineId   StringValue `tfsdk:"pipeline_id"`
	Title        StringValue `tfsdk:"title"`
	Description  StringValue `tfsdk:"description"`
	Inputs       ListValue   `tfsdk:"inputs"`
	GenerationId Int64Value  `tfsdk:"generation_id"`
	MetricName   StringValue `tfsdk:"metric_name" user_config:"true"`
	MetricKind   StringValue `tfsdk:"metric_kind" user_config:"true"`
	MetricType   StringValue `tfsdk:"metric_type" user_config:"true"`
	// The value_*/namespace_* fields deliberately lack user_config:"true":
	// their layout differs from the raw user_config, so the converters build
	// them manually. Splitting string (ValueField) from number (ValueNumber)
	// keeps each attribute a single, simple Terraform type.
	ValueField     StringValue  `tfsdk:"value_field"`
	ValueNumber    Float64Value `tfsdk:"value_number"`
	NamespaceField StringValue  `tfsdk:"namespace_field"`
	NamespaceValue StringValue  `tfsdk:"namespace_value"`
	Tags           ListValue    `tfsdk:"tags" user_config:"true"`
}

type FilterProcessorModel added in v1.0.3

// FilterProcessorModel is the Terraform plan/state model for the filter
// processor: Action plus a Conditional object describing the match rule.
type FilterProcessorModel struct {
	Id           StringValue `tfsdk:"id"`
	PipelineId   StringValue `tfsdk:"pipeline_id"`
	Title        StringValue `tfsdk:"title"`
	Description  StringValue `tfsdk:"description"`
	Inputs       ListValue   `tfsdk:"inputs"`
	GenerationId Int64Value  `tfsdk:"generation_id"`
	Action       StringValue `tfsdk:"action" user_config:"true"`
	Conditional  ObjectValue `tfsdk:"conditional" user_config:"true"`
}

type FlattenFieldsProcessorModel

// FlattenFieldsProcessorModel is the Terraform plan/state model for the
// flatten-fields processor; Delimiter joins nested key segments when
// flattening — TODO confirm against the processor's API docs.
type FlattenFieldsProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Fields       List   `tfsdk:"fields" user_config:"true"`
	Delimiter    String `tfsdk:"delimiter" user_config:"true"`
}

type MapFieldsProcessorModel added in v1.0.1

// MapFieldsProcessorModel is the Terraform plan/state model for the
// map-fields processor; Mappings holds the list of field mappings.
type MapFieldsProcessorModel struct {
	Id           StringValue `tfsdk:"id"`
	PipelineId   StringValue `tfsdk:"pipeline_id"`
	Title        StringValue `tfsdk:"title"`
	Description  StringValue `tfsdk:"description"`
	Inputs       ListValue   `tfsdk:"inputs"`
	GenerationId Int64Value  `tfsdk:"generation_id"`
	Mappings     ListValue   `tfsdk:"mappings" user_config:"true"`
}

type MetricsTagCardinalityLimitProcessorModel added in v1.0.2

// MetricsTagCardinalityLimitProcessorModel is the Terraform plan/state model
// for the metrics-tag-cardinality-limit processor, which caps the number of
// distinct values per metric tag — exact semantics of Action/Mode should be
// confirmed against the processor's API docs.
type MetricsTagCardinalityLimitProcessorModel struct {
	Id           StringValue `tfsdk:"id"`
	PipelineId   StringValue `tfsdk:"pipeline_id"`
	Title        StringValue `tfsdk:"title"`
	Description  StringValue `tfsdk:"description"`
	Inputs       ListValue   `tfsdk:"inputs"`
	GenerationId Int64Value  `tfsdk:"generation_id"`
	Tags         ListValue   `tfsdk:"tags" user_config:"true"`
	ExcludeTags  ListValue   `tfsdk:"exclude_tags" user_config:"true"`
	Action       StringValue `tfsdk:"action" user_config:"true"`
	ValueLimit   Int64Value  `tfsdk:"value_limit" user_config:"true"`
	Mode         StringValue `tfsdk:"mode" user_config:"true"`
}

type ParseProcessorModel

// ParseProcessorModel is the Terraform plan/state model for the parse
// processor. Parser selects the parser to apply; the *Options objects carry
// parser-specific settings — presumably only the one matching Parser is
// populated (confirm in ParseProcessorFromModel).
type ParseProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Field        String `tfsdk:"field" user_config:"true"`
	TargetField  String `tfsdk:"target_field" user_config:"true"`
	Parser       String `tfsdk:"parser" user_config:"true"`
	// Per-parser option blocks; Go field names are shorter than their
	// attribute names (e.g. ApacheOptions -> "apache_log_options").
	ApacheOptions    Object `tfsdk:"apache_log_options" user_config:"true"`
	CefOptions       Object `tfsdk:"cef_log_options" user_config:"true"`
	CsvOptions       Object `tfsdk:"csv_row_options" user_config:"true"`
	GrokOptions      Object `tfsdk:"grok_parser_options" user_config:"true"`
	KeyValueOptions  Object `tfsdk:"key_value_log_options" user_config:"true"`
	NginxOptions     Object `tfsdk:"nginx_log_options" user_config:"true"`
	RegexOptions     Object `tfsdk:"regex_parser_options" user_config:"true"`
	TimestampOptions Object `tfsdk:"timestamp_parser_options" user_config:"true"`
}

type ParseSequentiallyProcessorModel

// ParseSequentiallyProcessorModel is the Terraform plan/state model for the
// parse-sequentially processor: Parsers are tried in order against Field.
type ParseSequentiallyProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Field        String `tfsdk:"field" user_config:"true"`
	TargetField  String `tfsdk:"target_field" user_config:"true"`
	Parsers      List   `tfsdk:"parsers" user_config:"true"`
	// Unmatched exists in the API's outputs but is not exposed to the user;
	// it is tagged user_config:"true" only to keep Terraform's state
	// handling consistent.
	Unmatched String `tfsdk:"unmatched" user_config:"true"`
}

type ReduceProcessorModel

// ReduceProcessorModel is the Terraform plan/state model for the reduce
// processor: events are grouped (GroupBy) and merged (MergeStrategies) over
// a window bounded by DurationMs/MaxEvents, with an optional FlushCondition.
type ReduceProcessorModel struct {
	Id              StringValue `tfsdk:"id"`
	PipelineId      StringValue `tfsdk:"pipeline_id"`
	Title           StringValue `tfsdk:"title"`
	Description     StringValue `tfsdk:"description"`
	Inputs          ListValue   `tfsdk:"inputs"`
	GenerationId    Int64Value  `tfsdk:"generation_id"`
	DurationMs      Int64Value  `tfsdk:"duration_ms" user_config:"true"`
	MaxEvents       Int64Value  `tfsdk:"max_events" user_config:"true"`
	GroupBy         ListValue   `tfsdk:"group_by" user_config:"true"`
	DateFormats     ListValue   `tfsdk:"date_formats" user_config:"true"`
	MergeStrategies ListValue   `tfsdk:"merge_strategies" user_config:"true"`
	FlushCondition  ObjectValue `tfsdk:"flush_condition" user_config:"true"`
}

type RouteProcessorModel

// RouteProcessorModel is the Terraform plan/state model for the route
// processor; Conditionals defines the per-route match rules.
type RouteProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Conditionals List   `tfsdk:"conditionals" user_config:"true"`
	// Unmatched exists in the API's outputs but is not exposed to the user;
	// it is tagged user_config:"true" only to keep Terraform's state
	// handling consistent.
	Unmatched String `tfsdk:"unmatched" user_config:"true"`
}

type SampleProcessorModel

// SampleProcessorModel is the Terraform plan/state model for the sample
// processor: keep events at the given Rate, with an AlwaysInclude rule for
// events that must never be dropped.
type SampleProcessorModel struct {
	Id            String `tfsdk:"id"`
	PipelineId    String `tfsdk:"pipeline_id"`
	Title         String `tfsdk:"title"`
	Description   String `tfsdk:"description"`
	Inputs        List   `tfsdk:"inputs"`
	GenerationId  Int64  `tfsdk:"generation_id"`
	Rate          Int64  `tfsdk:"rate" user_config:"true"`
	AlwaysInclude Object `tfsdk:"always_include" user_config:"true"`
}

type SchemaAttributes

// SchemaAttributes maps attribute names to their terraform-plugin-framework
// schema definitions; it is the unit composed by ExtendBaseAttributes and
// ExtendSchemaAttributes when building processor schemas.
type SchemaAttributes map[string]schema.Attribute

func ExtendBaseAttributes

func ExtendBaseAttributes(target SchemaAttributes) SchemaAttributes

func ExtendSchemaAttributes

func ExtendSchemaAttributes(fromAttributes SchemaAttributes, toAttributes SchemaAttributes) SchemaAttributes

type ScriptExecutionProcessorModel

// ScriptExecutionProcessorModel is the Terraform plan/state model for the
// script-execution processor; Script holds the user-provided script source.
type ScriptExecutionProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Script       String `tfsdk:"script" user_config:"true"`
}

type StringifyProcessorModel

// StringifyProcessorModel is the Terraform plan/state model for the
// stringify processor. It carries only the common component fields — the
// processor has no user-configurable options of its own.
type StringifyProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
}

type UnrollProcessorModel

// UnrollProcessorModel is the Terraform plan/state model for the unroll
// processor: expands the array at Field into individual events; ValuesOnly
// presumably controls whether only the array values (vs. whole objects) are
// emitted — confirm against the processor's API docs.
type UnrollProcessorModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Field        String `tfsdk:"field" user_config:"true"`
	ValuesOnly   Bool   `tfsdk:"values_only" user_config:"true"`
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL