kafka

package
v0.0.0-...-16a0e24 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 25, 2020 License: MIT Imports: 12 Imported by: 0

README

Kafka Output Plugin

This plugin writes to a Kafka Broker acting as a Kafka Producer.

Configuration:
[[outputs.kafka]]
  ## URLs of kafka brokers
  brokers = ["localhost:9092"]
  ## Kafka topic for producer messages
  topic = "telegraf"

  ## The value of this tag will be used as the topic.  If not set the 'topic'
  ## option is used.
  # topic_tag = ""

  ## If true, the 'topic_tag' will be removed from the metric.
  # exclude_topic_tag = false

  ## Optional Client id
  # client_id = "Telegraf"

  ## Set the minimal supported Kafka version.  Setting this enables the use of new
  ## Kafka features and APIs.  Of particular interest, lz4 compression
  ## requires at least version 0.10.0.0.
  ##   ex: version = "1.1.0"
  # version = ""

  ## Optional topic suffix configuration.
  ## If the section is omitted, no suffix is used.
  ## Following topic suffix methods are supported:
  ##   measurement - suffix equals to separator + measurement's name
  ##   tags        - suffix equals to separator + specified tags' values
  ##                 interleaved with separator

  ## Suffix equals to "_" + measurement name
  # [outputs.kafka.topic_suffix]
  #   method = "measurement"
  #   separator = "_"

  ## Suffix equals to "__" + measurement's "foo" tag value.
  ##   If there is no such tag, suffix equals to an empty string
  # [outputs.kafka.topic_suffix]
  #   method = "tags"
  #   keys = ["foo"]
  #   separator = "__"

  ## Suffix equals to "_" + measurement's "foo" and "bar"
  ##   tag values, separated by "_". If there are no such tags,
  ##   their values are treated as empty strings.
  # [outputs.kafka.topic_suffix]
  #   method = "tags"
  #   keys = ["foo", "bar"]
  #   separator = "_"

  ## The routing tag specifies a tagkey on the metric whose value is used as
  ## the message key.  The message key is used to determine which partition to
  ## send the message to.  This tag is preferred over the routing_key option.
  routing_tag = "host"

  ## The routing key is set as the message key and used to determine which
  ## partition to send the message to.  This value is only used when no
  ## routing_tag is set or as a fallback when the tag specified in routing tag
  ## is not found.
  ##
  ## If set to "random", a random value will be generated for each message.
  ##
  ## When unset, no message key is added and each message is routed to a random
  ## partition.
  ##
  ##   ex: routing_key = "random"
  ##       routing_key = "telegraf"
  # routing_key = ""

  ## CompressionCodec represents the various compression codecs recognized by
  ## Kafka in messages.
  ##  0 : No compression
  ##  1 : Gzip compression
  ##  2 : Snappy compression
  ##  3 : LZ4 compression
  # compression_codec = 0

  ##  RequiredAcks is used in Produce Requests to tell the broker how many
  ##  replica acknowledgements it must see before responding
  ##   0 : the producer never waits for an acknowledgement from the broker.
  ##       This option provides the lowest latency but the weakest durability
  ##       guarantees (some data will be lost when a server fails).
  ##   1 : the producer gets an acknowledgement after the leader replica has
  ##       received the data. This option provides better durability as the
  ##       client waits until the server acknowledges the request as successful
  ##       (only messages that were written to the now-dead leader but not yet
  ##       replicated will be lost).
  ##   -1: the producer gets an acknowledgement after all in-sync replicas have
  ##       received the data. This option provides the best durability, we
  ##       guarantee that no messages will be lost as long as at least one in
  ##       sync replica remains.
  # required_acks = -1

  ## The maximum number of times to retry sending a metric before failing
  ## until the next flush.
  # max_retry = 3

  ## Optional TLS Config
  # tls_ca = "/etc/telegraf/ca.pem"
  # tls_cert = "/etc/telegraf/cert.pem"
  # tls_key = "/etc/telegraf/key.pem"
  ## Use TLS but skip chain & host verification
  # insecure_skip_verify = false

  ## Optional SASL Config
  # sasl_username = "kafka"
  # sasl_password = "secret"

  ## SASL protocol version.  When connecting to Azure EventHub set to 0.
  # sasl_version = 1

  ## Data format to output.
  ## Each data format has its own unique set of configuration options, read
  ## more about them here:
  ## https://gitee.com/zhimiao/qiansi-telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
  # data_format = "influx"
max_retry

This option controls the number of retries before a failure notification is displayed for each message when no acknowledgement is received from the broker. When the setting is greater than 0, message latency can be increased, duplicate messages can occur in cases of transient errors, and broker loads can increase during downtime.

The option is similar to the retries Producer option in the Java Kafka Producer.

Documentation

Index

Constants

This section is empty.

Variables

View Source
// ValidTopicSuffixMethods lists the accepted values for the
// topic_suffix "method" option.  The empty string means no suffix
// is applied; see ValidateTopicSuffixMethod.
var ValidTopicSuffixMethods = []string{
	"",
	"measurement",
	"tags",
}

Functions

func ValidateTopicSuffixMethod

func ValidateTopicSuffixMethod(method string) error

Types

type DebugLogger

// DebugLogger adapts the sarama client's logger interface
// (Print/Printf/Println) so sarama's internal messages are emitted at
// the debug level.  NOTE(review): the struct's fields, if any, are
// filtered from this godoc view — confirm against the source file.
type DebugLogger struct {
}

DebugLogger logs messages from sarama at the debug level.

func (*DebugLogger) Print

func (*DebugLogger) Print(v ...interface{})

func (*DebugLogger) Printf

func (*DebugLogger) Printf(format string, v ...interface{})

func (*DebugLogger) Println

func (*DebugLogger) Println(v ...interface{})

type Kafka

// Kafka is a Telegraf output plugin that publishes serialized metrics
// to one or more Kafka brokers via the sarama producer client.  Fields
// map one-to-one onto the [[outputs.kafka]] TOML configuration shown in
// the README above.
type Kafka struct {
	// Brokers is the list of broker addresses ("host:port") to connect to.
	Brokers          []string    `toml:"brokers"`
	// Topic is the default topic for produced messages.
	Topic            string      `toml:"topic"`
	// TopicTag names a metric tag whose value overrides Topic per metric.
	TopicTag         string      `toml:"topic_tag"`
	// ExcludeTopicTag, when true, strips the TopicTag from the metric.
	ExcludeTopicTag  bool        `toml:"exclude_topic_tag"`
	// ClientID is the optional Kafka client identifier.
	ClientID         string      `toml:"client_id"`
	// TopicSuffix appends a computed suffix to the topic name
	// (method "measurement" or "tags"; see TopicSuffix type).
	TopicSuffix      TopicSuffix `toml:"topic_suffix"`
	// RoutingTag names a tag whose value becomes the message key;
	// preferred over RoutingKey when the tag is present.
	RoutingTag       string      `toml:"routing_tag"`
	// RoutingKey is the fallback message key; "random" generates a
	// random key per message.
	RoutingKey       string      `toml:"routing_key"`
	// CompressionCodec: 0=none, 1=gzip, 2=snappy, 3=lz4.
	CompressionCodec int         `toml:"compression_codec"`
	// RequiredAcks: 0=no ack, 1=leader ack, -1=all in-sync replicas.
	RequiredAcks     int         `toml:"required_acks"`
	// MaxRetry is the number of send retries before failing until the
	// next flush.
	MaxRetry         int         `toml:"max_retry"`
	// MaxMessageBytes bounds the size of a produced message.
	// NOTE(review): not documented in the sample config above — confirm
	// semantics against the implementation.
	MaxMessageBytes  int         `toml:"max_message_bytes"`

	// Version is the minimal supported Kafka protocol version,
	// e.g. "1.1.0"; enables newer broker features when set.
	Version string `toml:"version"`

	// Legacy TLS config options
	// TLS client certificate
	Certificate string
	// TLS client key
	Key string
	// TLS certificate authority
	CA string

	// EnableTLS forces TLS on when set; nil leaves the decision to the
	// embedded ClientConfig.
	EnableTLS *bool `toml:"enable_tls"`
	tlsint.ClientConfig

	// SASL plain-auth credentials; empty disables SASL.
	SASLUsername string `toml:"sasl_username"`
	SASLPassword string `toml:"sasl_password"`
	// SASLVersion selects the SASL protocol version; use 0 for
	// Azure EventHub (per the sample config above).
	SASLVersion  *int   `toml:"sasl_version"`

	// Log is injected by Telegraf; excluded from TOML decoding.
	Log telegraf.Logger `toml:"-"`
	// contains filtered or unexported fields
}

func (*Kafka) Close

func (k *Kafka) Close() error

func (*Kafka) Connect

func (k *Kafka) Connect() error

func (*Kafka) Description

func (k *Kafka) Description() string

func (*Kafka) GetTopicName

func (k *Kafka) GetTopicName(metric telegraf.Metric) (telegraf.Metric, string)

func (*Kafka) SampleConfig

func (k *Kafka) SampleConfig() string

func (*Kafka) SetSerializer

func (k *Kafka) SetSerializer(serializer serializers.Serializer)

func (*Kafka) Write

func (k *Kafka) Write(metrics []telegraf.Metric) error

type TopicSuffix

// TopicSuffix configures how a suffix is appended to the produced
// topic name.  Method must be one of ValidTopicSuffixMethods:
// "measurement" appends Separator + the measurement name; "tags"
// appends Separator-joined values of the tags named in Keys.
type TopicSuffix struct {
	// Method selects the suffixing strategy ("", "measurement", "tags").
	Method    string   `toml:"method"`
	// Keys lists the tag names used when Method is "tags".
	Keys      []string `toml:"keys"`
	// Separator is placed before (and, for "tags", between) suffix parts.
	Separator string   `toml:"separator"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL