exasol

package module
v0.0.1

Published: Oct 9, 2020 License: MIT Imports: 24 Imported by: 0

README

go-exasol-client

A simple Go library for connecting to Exasol

It is based on the Exasol WebSocket API and therefore requires Exasol version 6.0 or above.

Examples

import "github.com/grantstreetgroup/go-exasol-client"

func main() {
    conf := exasol.ConnConf{
        Host:     "cluster-node", // IP ranges not yet supported
        Port:     8563,
        Username: "user",
        Password: "pass",
    }
    conn := exasol.Connect(conf)
    defer conn.Disconnect()

    conn.DisableAutoCommit()

    conn.Execute("ALTER SESSION SET...")

    _, err := conn.Execute("INSERT INTO t VALUES(?,?,?)", [][]interface{}{...})

    res, err := conn.FetchSlice("SELECT * FROM t WHERE c = ?", []interface{}{...})
    for _, row := range res {
        col := row[0].(string)
    }

    // For large datasets use FetchChan to avoid buffering
    // the entire result set in memory
    rows, err := conn.FetchChan("SELECT * FROM t")
    for row := range rows {
        col := row[0].(string)
    }


    // For very large datasets you can send/receive your data
    // in CSV format (stored in a bytes.Buffer) using the Bulk* methods.
    // This is the fastest way to move data into or out of Exasol.
    csvData := new(bytes.Buffer)
    csvData.WriteString("csv,data,...\n...")

    // To upload to a particular table
    err = conn.BulkInsert(schemaName, tableName, csvData)

    // To select all data from a particular table
    csvData.Reset() // clear the buffer so it can hold the result
    err = conn.BulkSelect(schemaName, tableName, csvData)
    SomeCSVParser(csvData.String())

    // To run an arbitrary export query. The library substitutes the
    // address of its internal proxy for the '%%s' placeholder.
    sql := "EXPORT (SELECT c FROM t) INTO CSV AT '%%s' FILE 'data.csv'"
    err = conn.BulkQuery(sql, csvData)
    SomeCSVParser(csvData.String())


    // For extremely large datasets that cannot fit in memory,
    // you can stream your CSV data to/from any of the above Bulk
    // methods by using the equivalent Stream* method.
    // The stream consists of a chan of byte slices, where each
    // byte slice is optimally around 8K in size.
    csvChan := make(chan []byte, 1000) // Chan size depends on your memory
    go func() {
        defer close(csvChan)
        for {
            ...
            // Generate your CSV data in ~8K chunks
            csvChan <- []byte("csv,data...\n...")
        }
    }()
    err = conn.StreamInsert(schemaName, tableName, csvChan)


    csvChan = make(chan []byte, 1000) // the channel above was closed; make a fresh one
    go func() {
        // Read your CSV data in ~8K chunks
        for chunk := range csvChan {
            // chunk is a []byte with partial csvData
            _ = chunk
        }
    }()
    bytesRead, err := conn.StreamSelect(schemaName, tableName, csvChan)


    conn.Commit()
}

TODOs

  • Adapt this library to the standard Go database/sql driver interface.
  • Add support for cluster node IP address ranges
  • Add support for encryption and compression
  • Implement timeouts for all query types
  • Add tests

Author

Grant Street Group developers@grantstreet.com

This software is Copyright (c) 2019 by Grant Street Group.

This is free software, licensed under:

MIT License

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func QuoteStr

func QuoteStr(str string) string
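
QuoteStr presumably escapes embedded single quotes so a value can be spliced into a SQL string literal; binds are usually the safer choice. A minimal sketch, assuming the conn from the Examples section above:

    val := exasol.QuoteStr("O'Brien")
    res, err := conn.FetchSlice(
        "SELECT * FROM t WHERE name = '" + val + "'",
    )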

func Transpose

func Transpose(matrix [][]interface{}) [][]interface{}
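
Transpose presumably converts between row-major and columnar layouts (see the isColumnar flag mentioned under Execute). A sketch:

    rows := [][]interface{}{
        {1, "a"},
        {2, "b"},
    }
    cols := exasol.Transpose(rows)
    // cols is now [][]interface{}{{1, 2}, {"a", "b"}}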

Types

type Conn

type Conn struct {
	Conf      ConnConf
	SessionID uint64
	Stats     map[string]int
	// contains filtered or unexported fields
}

func Connect

func Connect(conf ConnConf) *Conn

func (*Conn) BulkExecute

func (c *Conn) BulkExecute(sql string, data *bytes.Buffer) error

func (*Conn) BulkInsert

func (c *Conn) BulkInsert(schema, table string, data *bytes.Buffer) (err error)

func (*Conn) BulkQuery

func (c *Conn) BulkQuery(sql string, data *bytes.Buffer) error

func (*Conn) BulkSelect

func (c *Conn) BulkSelect(schema, table string, data *bytes.Buffer) (err error)

func (*Conn) Commit

func (c *Conn) Commit() error

func (*Conn) DisableAutoCommit

func (c *Conn) DisableAutoCommit()

func (*Conn) Disconnect

func (c *Conn) Disconnect()

func (*Conn) EnableAutoCommit

func (c *Conn) EnableAutoCommit()

func (*Conn) Execute

func (c *Conn) Execute(sql string, args ...interface{}) (map[string]interface{}, error)

TODO: change the optional args into an ExecConf struct. The optional args are binds, a default schema, colDefs, and an isColumnar flag.
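
A sketch of multi-row binds, following the README example above (the table and values are illustrative):

    // Each inner slice is one row of positional binds.
    _, err := conn.Execute(
        "INSERT INTO t VALUES (?, ?)",
        [][]interface{}{
            {1, "a"},
            {2, "b"},
        },
    )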

func (*Conn) FetchChan

func (c *Conn) FetchChan(sql string, args ...interface{}) (<-chan []interface{}, error)

func (*Conn) FetchSlice

func (c *Conn) FetchSlice(sql string, args ...interface{}) (res [][]interface{}, err error)

For large datasets, use FetchChan to avoid buffering all the data in memory.
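
A sketch mirroring the README example; rows arrive over the channel one at a time instead of being collected into a slice:

    rows, err := conn.FetchChan("SELECT id, name FROM t")
    if err != nil {
        // handle the error
    }
    for row := range rows {
        name := row[1].(string) // column values are interface{}
        _ = name
    }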

func (*Conn) GetSessionAttr

func (c *Conn) GetSessionAttr() (map[string]interface{}, error)

func (*Conn) QuoteIdent

func (c *Conn) QuoteIdent(ident string) string

func (*Conn) Rollback

func (c *Conn) Rollback() error

func (*Conn) StreamExecute

func (c *Conn) StreamExecute(origSQL string, data <-chan []byte) error

func (*Conn) StreamInsert

func (c *Conn) StreamInsert(schema, table string, data <-chan []byte) (err error)

func (*Conn) StreamQuery

func (c *Conn) StreamQuery(origSQL string, data chan<- []byte) (int64, error)

func (*Conn) StreamSelect

func (c *Conn) StreamSelect(schema, table string, data chan<- []byte) (int64, error)

type ConnConf

type ConnConf struct {
	Host          string
	Port          uint16
	Username      string
	Password      string
	ClientName    string
	Timeout       uint32 // In Seconds
	SuppressError bool   // Server errors are logged to Error by default
	// TODO try compressionEnabled: true
	LogLevel string
}
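
For illustration, a minimal configuration (Timeout is in seconds per the comment above; the accepted LogLevel values are an assumption, check the logging package this library uses):

    conf := exasol.ConnConf{
        Host:     "cluster-node",
        Port:     8563,
        Username: "user",
        Password: "pass",
        Timeout:  300,    // seconds
        LogLevel: "info", // assumed value
    }
    conn := exasol.Connect(conf)
    defer conn.Disconnect()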

type DataType

type DataType struct {
	Type    string `json:"type"`
	Size    int    `json:"size"`
	Prec    int    `json:"precision"`
	Scale   int    `json:"scale"`
	CharSet string `json:"characterSet,omitempty"`
}

type Proxy

type Proxy struct {
	Host string
	Port uint32
	// contains filtered or unexported fields
}

func NewProxy

func NewProxy(host string, port uint16, log *logging.Logger) (*Proxy, error)

A Proxy must never be used concurrently with the Conn that created it; doing so introduces data races into your program.
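
A hedged sketch of the safe, strictly sequential pattern (the logger argument presumably comes from the go-logging package this library imports; the surrounding usage is an assumption, not a documented recipe):

    proxy, err := exasol.NewProxy("cluster-node", 8563, logger)
    if err != nil {
        // handle the error
    }
    defer proxy.Shutdown()

    // Use the proxy only while no calls on the creating Conn are in flight.
    data := make(chan []byte, 100)
    // ... feed CSV chunks into data and close it ...
    if _, err := proxy.Write(data); err != nil {
        // handle the error
    }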

func (*Proxy) Read

func (p *Proxy) Read(data chan<- []byte) (int64, error)

func (*Proxy) Shutdown

func (p *Proxy) Shutdown()

func (*Proxy) Write

func (p *Proxy) Write(data <-chan []byte) (bool, error)
