logfile

package v1.3000030.2
Published: Nov 2, 2023 License: MIT Imports: 22 Imported by: 0

README

Logs Input Plugin

The logs plugin "tails" a logfile and parses each log message.

By default, the plugin acts like the following unix tail command:

tail -F --lines=0 myfile.log
  • -F means that it follows the given file by name, so it remains compatible with log rotation and retries when the file is inaccessible.
  • --lines=0 means that it will start at the end of the file (unless the from_beginning option is set).

See http://man7.org/linux/man-pages/man1/tail.1.html for more details.

The plugin expects messages in one of the Telegraf Input Data Formats.

Configuration:
# Stream a log file, like the tail -f command
  [[inputs.logs]]
  ## Files to tail.
  ## These accept standard unix glob matching rules, but with the addition of
  ## ** as a "super asterisk", i.e.:
  ##   "/var/log/**.log"  -> recursively find all .log files in /var/log
  ##   "/var/log/*/*.log" -> find all .log files with a parent dir in /var/log
  ##   "/var/log/apache.log" -> just tail the apache log file
  ##
  ## See https://github.com/gobwas/glob for more examples
  ##
  ## Default log output destination name for all file_configs.
  ## Each file_config can override its own destination if needed.
  destination = "cloudwatchlogs"

  ## Folder path where the state of how much of each file has been transferred is stored
  file_state_folder = "/tmp/logfile/state"

  [[inputs.logs.file_config]]
      file_path = "/tmp/logfile.log*"
      log_group_name = "logfile.log"
      log_stream_name = "<log_stream_name>"
      timestamp_regex = "^(\\d{2} \\w{3} \\d{4} \\d{2}:\\d{2}:\\d{2}).*$"
      timestamp_layout = ["_2 Jan 2006 15:04:05"]
      timezone = "UTC"
      multi_line_start_pattern = "{timestamp_regex}"
      ## Read file from beginning.
      from_beginning = false
      ## Whether file is a named pipe
      pipe = false
      retention_in_days = -1
      destination = "cloudwatchlogs"
  [[inputs.logs.file_config]]
      file_path = "/var/log/*.log"
      ## Regular expression for log files to ignore
      blacklist = "journal|syslog"
      ## Publish all log files that match file_path
      publish_multi_logs = true
      log_group_name = "varlog"
      log_stream_name = "<log_stream_name>"
      timestamp_regex = "^(\\d{2} \\w{3} \\d{4} \\d{2}:\\d{2}:\\d{2}).*$"
      timestamp_layout = ["_2 Jan 2006 15:04:05"]
      timezone = "UTC"
      multi_line_start_pattern = "{timestamp_regex}"
      ## Read file from beginning.
      from_beginning = false
      ## Whether file is a named pipe
      pipe = false
      retention_in_days = -1
      destination = "cloudwatchlogs"
      ## Max size of each log event, defaults to 262144 (256KB)
      max_event_size = 262144
      ## Suffix to be added to truncated logline to indicate its truncation, defaults to "[Truncated...]"
      truncate_suffix = "[Truncated...]"
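
The sample above maps onto the LogFile and FileConfig TOML tags documented below. As a rough, self-contained illustration of that mapping only (not how the agent itself loads configuration), the following sketch decodes a trimmed-down sample into local mirror structs; it assumes the github.com/BurntSushi/toml decoder.

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// fileConfig mirrors a subset of the documented FileConfig TOML tags.
type fileConfig struct {
	FilePath        string   `toml:"file_path"`
	LogGroupName    string   `toml:"log_group_name"`
	TimestampRegex  string   `toml:"timestamp_regex"`
	TimestampLayout []string `toml:"timestamp_layout"`
	Timezone        string   `toml:"timezone"`
	FromBeginning   bool     `toml:"from_beginning"`
	RetentionInDays int      `toml:"retention_in_days"`
	Destination     string   `toml:"destination"`
}

// logsInput mirrors the documented LogFile TOML tags.
type logsInput struct {
	FileConfig      []fileConfig `toml:"file_config"`
	FileStateFolder string       `toml:"file_state_folder"`
	Destination     string       `toml:"destination"`
}

type config struct {
	Inputs struct {
		Logs []logsInput `toml:"logs"`
	} `toml:"inputs"`
}

func main() {
	const sample = `
[[inputs.logs]]
  destination = "cloudwatchlogs"
  file_state_folder = "/tmp/logfile/state"

  [[inputs.logs.file_config]]
    file_path = "/tmp/logfile.log*"
    log_group_name = "logfile.log"
    timestamp_regex = "^(\\d{2} \\w{3} \\d{4} \\d{2}:\\d{2}:\\d{2}).*$"
    timestamp_layout = ["_2 Jan 2006 15:04:05"]
    timezone = "UTC"
    from_beginning = false
    retention_in_days = -1
`
	var cfg config
	if _, err := toml.Decode(sample, &cfg); err != nil {
		log.Fatal(err)
	}
	for _, in := range cfg.Inputs.Logs {
		for _, fc := range in.FileConfig {
			// A per-file destination overrides the input-level default.
			dest := in.Destination
			if fc.Destination != "" {
				dest = fc.Destination
			}
			fmt.Printf("tail %s -> group=%s dest=%s\n", fc.FilePath, fc.LogGroupName, dest)
		}
	}
}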

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func NewTailerSrc

func NewTailerSrc(
	group, stream, destination, stateFilePath string,
	tailer *tail.Tail,
	autoRemoval bool,
	isMultilineStartFn func(string) bool,
	filters []*LogFilter,
	timestampFn func(string) time.Time,
	enc encoding.Encoding,
	maxEventSize int,
	truncateSuffix string,
	retentionInDays int,
) *tailerSrc

func ShouldPublish added in v1.247350.0

func ShouldPublish(logGroupName, logStreamName string, filters []*LogFilter, event logs.LogEvent) bool

Types

type FileConfig

type FileConfig struct {
	//The file path for input log file.
	FilePath string `toml:"file_path"`
	//The blacklist used to filter out some files
	Blacklist string `toml:"blacklist"`

	PublishMultiLogs bool `toml:"publish_multi_logs"`

	Encoding string `toml:"encoding"`
	//The log group name for the input log file.
	LogGroupName string `toml:"log_group_name"`
	//log stream name
	LogStreamName string `toml:"log_stream_name"`

	//The regex of the timestampFromLogLine present in the log entry
	TimestampRegex string `toml:"timestamp_regex"`
	//The timestampFromLogLine layout used in Go to parse the timestampFromLogLine.
	TimestampLayout []string `toml:"timestamp_layout"`
	//The time zone used to parse the timestampFromLogLine in the log entry.
	Timezone string `toml:"timezone"`

	//Indicates whether a line is the start of a multiline entry.
	//If this config is not present, multiline mode is disabled.
	//If this config is specified as "{timestamp_regex}", the same regex as timestampFromLogLine is used.
	//If this config is specified as some other regex, that regex determines whether a line starts a multiline entry.
	MultiLineStartPattern string `toml:"multi_line_start_pattern"`

	// Automatically remove the file / symlink after uploading.
	// This auto removal does not support the case where another log rotation mechanism is already in place.
	AutoRemoval bool `toml:"auto_removal"`

	//Indicates whether to tail the log file from the beginning or not.
	//The default value for this field should be set to true in configuration.
	//Otherwise, some log entries may be skipped when a new file is rotated in with a timestamp suffix.
	FromBeginning bool `toml:"from_beginning"`
	//Indicate whether it is a named pipe.
	Pipe bool `toml:"pipe"`

	//Indicate logType for scroll
	LogType string `toml:"log_type"`

	//Log Destination override
	Destination string `toml:"destination"`

	//Max size of a single log event, in bytes
	MaxEventSize int `toml:"max_event_size"`

	//Suffix added to a truncated log line to indicate its truncation
	TruncateSuffix string `toml:"truncate_suffix"`

	//Indicate retention in days for log group
	RetentionInDays int `toml:"retention_in_days"`

	Filters []*LogFilter `toml:"filters"`

	//Parsed timezone info (*time.Location).
	TimezoneLoc *time.Location
	//Compiled timestampFromLogLine regex (*regexp.Regexp).
	TimestampRegexP *regexp.Regexp
	//Compiled multiline start regex (*regexp.Regexp).
	MultiLineStartPatternP *regexp.Regexp
	//Compiled blacklist regex (*regexp.Regexp).
	BlacklistRegexP *regexp.Regexp
	//Decoder used for the configured encoding.
	Enc encoding.Encoding
	// contains filtered or unexported fields
}

FileConfig represents the configuration for a single file to be tailed.
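
As a minimal, self-contained sketch of how the timestamp_regex, timestamp_layout, and timezone settings from the sample configuration fit together (the package's own parsing may differ in detail): the regex capture group isolates the timestamp text, the Go reference layout parses it in the configured location, and multi_line_start_pattern = "{timestamp_regex}" means a line matching that regex starts a new log event.

package main

import (
	"fmt"
	"regexp"
	"time"
)

func main() {
	// Values taken from the sample configuration above.
	timestampRe := regexp.MustCompile(`^(\d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2}).*$`)
	layout := "_2 Jan 2006 15:04:05"
	loc, err := time.LoadLocation("UTC")
	if err != nil {
		panic(err)
	}

	line := "02 Nov 2023 15:04:05 ERROR something went wrong"

	// multi_line_start_pattern = "{timestamp_regex}": a line matching the
	// timestamp regex begins a new (possibly multi-line) log event.
	isStart := timestampRe.MatchString(line)

	var ts time.Time
	if m := timestampRe.FindStringSubmatch(line); len(m) > 1 {
		ts, err = time.ParseInLocation(layout, m[1], loc)
		if err != nil {
			panic(err)
		}
	}
	fmt.Println(isStart, ts) // true 2023-11-02 15:04:05 +0000 UTC
}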

type LogEvent

type LogEvent struct {
	// contains filtered or unexported fields
}

func (LogEvent) Done

func (le LogEvent) Done()

func (LogEvent) Message

func (le LogEvent) Message() string

func (LogEvent) Time

func (le LogEvent) Time() time.Time

type LogFile

type LogFile struct {
	//array of file config for file to be monitored.
	FileConfig []FileConfig `toml:"file_config"`
	//store the offset of file already published.
	FileStateFolder string `toml:"file_state_folder"`
	//destination
	Destination string `toml:"destination"`

	Log telegraf.Logger `toml:"-"`
	// contains filtered or unexported fields
}

func NewLogFile

func NewLogFile() *LogFile

func (*LogFile) Description

func (t *LogFile) Description() string

func (*LogFile) FindLogSrc

func (t *LogFile) FindLogSrc() []logs.LogSrc

Tries to find any new files that need to be added for monitoring.

func (*LogFile) Gather

func (t *LogFile) Gather(acc telegraf.Accumulator) error

func (*LogFile) SampleConfig

func (t *LogFile) SampleConfig() string

func (*LogFile) Start

func (t *LogFile) Start(acc telegraf.Accumulator) error

func (*LogFile) Stop

func (t *LogFile) Stop()

type LogFilter added in v1.247350.0

type LogFilter struct {
	Type       string `toml:"type"`
	Expression string `toml:"expression"`
	// contains filtered or unexported fields
}

func (*LogFilter) ShouldPublish added in v1.247350.0

func (filter *LogFilter) ShouldPublish(event logs.LogEvent) bool
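
For intuition only, here is a hypothetical, self-contained sketch of regex-based include/exclude filtering over log messages, matching the shape of LogFilter (Type plus Expression). The "include"/"exclude" type names and the evaluation order are assumptions for illustration, not the package's confirmed semantics.

package main

import (
	"fmt"
	"regexp"
)

// logFilter mirrors the shape of LogFilter: a filter type plus an expression.
type logFilter struct {
	Type       string         // assumed values: "include" or "exclude"
	Expression *regexp.Regexp // compiled form of the toml "expression" field
}

// shouldPublish drops a message as soon as any filter rejects it.
func shouldPublish(msg string, filters []logFilter) bool {
	for _, f := range filters {
		matched := f.Expression.MatchString(msg)
		switch f.Type {
		case "include":
			if !matched {
				return false // must match every include filter
			}
		case "exclude":
			if matched {
				return false // must match no exclude filter
			}
		}
	}
	return true
}

func main() {
	filters := []logFilter{
		{Type: "exclude", Expression: regexp.MustCompile(`DEBUG`)},
	}
	fmt.Println(shouldPublish("INFO starting up", filters)) // true
	fmt.Println(shouldPublish("DEBUG cache miss", filters)) // false
}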
