config

package
v0.2.3-0...-5ed8acd Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 26, 2022 License: MIT Imports: 6 Imported by: 0

Documentation

Overview

Package config holds structs and routines for loading application and pipes configuration.

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type GlobalConfig

type GlobalConfig struct {
	// RabbitDSN is DSN for RabbitMQ instance to consume messages from
	RabbitDSN string `envconfig:"RABBIT_DSN"`
	// StorageDSN is DSN for persistent storage used in case of Kafka unavailability. Example:
	//  redis://redis.local/?key=storage:key
	StorageDSN string `envconfig:"STORAGE_DSN"`

	// Log contains configuration values for logging
	Log logging.LogConfig
	// Kafka contains configuration values for Kafka
	Kafka KafkaConfig
	// Stats contains configuration values for stats
	Stats StatsConfig
	// Worker contains configuration values for actual bridge worker
	Worker WorkerConfig
}

GlobalConfig contains application configuration values

func Load

func Load(configPath string) (*GlobalConfig, error)

Load loads config values from a file, falling back to environment variables if the file is not found or cannot be read

func LoadConfigFromEnv

func LoadConfigFromEnv() (*GlobalConfig, error)

LoadConfigFromEnv loads config values from environment variables

type KafkaConfig

type KafkaConfig struct {
	// Brokers is Kafka brokers comma-separated list, e.g. "192.168.0.1:9092,192.168.0.2:9092".
	Brokers []string `envconfig:"KAFKA_BROKERS"`
	// MaxRetry is total number of times to retry sending a message to Kafka, default is 5.
	MaxRetry int `envconfig:"KAFKA_MAX_RETRY"`
	// PipesConfig is a path to rabbit-kafka bridge mappings config.
	// This must be YAML file with the following structure:
	//
	//  ---
	//  - rabbitmq_exchange_name: "customers"     # Message from that RabbitMQ exchange
	//    rabbitmq_routing_key:   "order.created" # With that routing key
	//    kafka_topic:            "new-orders"    # Will be placed to that kafka topic
	//    # The queue name can be whatever you want, just keep it unique within pipes.
	//    # If you launch multiple kandalf instances they all will consume messages from that queue.
	//    rabbitmq_queue_name:    "kandalf-customers-order.created"
	//  - kafka_topic:            "loyalty"
	//    rabbitmq_exchange_name: "customers"
	//    rabbitmq_routing_key:   "badge.received"
	//    rabbitmq_queue_name:    "kandalf-customers-badge.received"
	//
	// Default path is "/etc/kandalf/conf/pipes.yml".
	PipesConfig string `envconfig:"KAFKA_PIPES_CONFIG"`
}

KafkaConfig contains application configuration values for Kafka

type Pipe

type Pipe struct {
	// KafkaTopic is the Kafka topic messages from this pipe are published to
	// (maps to "kafka_topic" in the pipes YAML file).
	KafkaTopic              string
	// RabbitExchangeName is the RabbitMQ exchange messages are consumed from
	// (maps to "rabbitmq_exchange_name").
	RabbitExchangeName      string
	// RabbitTransientExchange presumably declares the exchange as transient
	// (non-durable) — TODO confirm against the consumer code.
	RabbitTransientExchange bool
	// RabbitRoutingKey is the list of routing keys to bind the queue with
	// (maps to "rabbitmq_routing_key").
	RabbitRoutingKey        []string
	// RabbitQueueName is the queue to consume from; must be unique within pipes
	// (maps to "rabbitmq_queue_name").
	RabbitQueueName         string
	// RabbitDurableQueue presumably declares the queue as durable — TODO confirm.
	RabbitDurableQueue      bool
	// RabbitAutoDeleteQueue presumably declares the queue as auto-delete — TODO confirm.
	RabbitAutoDeleteQueue   bool
}

Pipe contains settings for single bridge pipe between Kafka and RabbitMQ

func LoadPipesFromFile

func LoadPipesFromFile(pipesConfigPath string) ([]Pipe, error)

LoadPipesFromFile loads pipes config from file

func (Pipe) String

func (p Pipe) String() string

type StatsConfig

type StatsConfig struct {
	// DSN is the stats backend connection string; semantics are defined by the
	// github.com/hellofresh/stats-go package — see its docs for supported schemes.
	DSN           string `envconfig:"STATS_DSN"`
	// ErrorsSection is the section name under which error stats are reported
	// — presumably; verify against stats-go usage.
	ErrorsSection string `envconfig:"STATS_ERRORS_SECTION"`
	// Port is the port used for stats — TODO confirm whether it is an exposition
	// endpoint or part of the DSN target.
	Port          int    `envconfig:"STATS_PORT"`
}

StatsConfig contains application configuration values for stats. For details - read docs for github.com/hellofresh/stats-go package

type WorkerConfig

type WorkerConfig struct {
	// CycleTimeout is worker cycle sleep time to avoid CPU overload
	CycleTimeout time.Duration `envconfig:"WORKER_CYCLE_TIMEOUT"`
	// CacheSize is max messages number that we store in memory before trying to publish to Kafka
	CacheSize int `envconfig:"WORKER_CACHE_SIZE"`
	// CacheFlushTimeout is max amount of time we store messages in memory before trying to publish to Kafka
	CacheFlushTimeout time.Duration `envconfig:"WORKER_CACHE_FLUSH_TIMEOUT"`
	// StorageReadTimeout is timeout between attempts of reading persisted messages from storage
	// to publish them to Kafka, must be at least 2x greater than CycleTimeout
	StorageReadTimeout time.Duration `envconfig:"WORKER_STORAGE_READ_TIMEOUT"`
	// StorageMaxErrors is max storage read errors in a row before worker stops trying reading in current
	// read cycle. Next read cycle will be in "StorageReadTimeout" interval.
	StorageMaxErrors int `envconfig:"WORKER_STORAGE_MAX_ERRORS"`
}

WorkerConfig contains application configuration values for actual bridge worker

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL