shlex

package module
v0.0.0-...-38f4b40 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 14, 2020 License: MIT Imports: 5 Imported by: 131

README

go-shlex

go-shlex is a Go library that performs shell-like lexical analysis (word splitting), similar to how a Unix shell parses a command line.

Install

go get -u "github.com/anmitsu/go-shlex"

Usage

package main

import (
    "fmt"
    "log"

    "github.com/anmitsu/go-shlex"
)

func main() {
    cmd := `cp -Rdp "file name" 'file name2' dir\ name`
    words, err := shlex.Split(cmd, true)
    if err != nil {
        log.Fatal(err)
    }

    for _, w := range words {
        fmt.Println(w)
    }
}

output

cp
-Rdp
file name
file name2
dir name

Documentation

http://godoc.org/github.com/anmitsu/go-shlex

Documentation

Overview

Package shlex provides simple lexical analysis similar to that of a Unix shell.

Index

Examples

Constants

This section is empty.

Variables

View Source
var (
	ErrNoClosing = errors.New("No closing quotation")
	ErrNoEscaped = errors.New("No escaped character")
)

Functions

func Split

func Split(s string, posix bool) ([]string, error)

Split splits a string according to posix or non-posix rules.

Example
cmd := `cp -Rdp "file name" 'file name2' dir\ name`

// Split of cmd with POSIX mode.
words1, err := shlex.Split(cmd, true)
if err != nil {
	log.Fatal(err)
}
// Split of cmd with Non-POSIX mode.
words2, err := shlex.Split(cmd, false)
if err != nil {
	log.Fatal(err)
}

fmt.Println("Source command:")
fmt.Println(`cp -Rdp "file name" 'file name2' dir\ name`)
fmt.Println()

fmt.Println("POSIX mode:")
for _, word := range words1 {
	fmt.Println(word)
}
fmt.Println()
fmt.Println("Non-POSIX mode:")
for _, word := range words2 {
	fmt.Println(word)
}
Output:

Source command:
cp -Rdp "file name" 'file name2' dir\ name

POSIX mode:
cp
-Rdp
file name
file name2
dir name

Non-POSIX mode:
cp
-Rdp
"file name"
'file name2'
dir\
name

Types

type DefaultTokenizer

type DefaultTokenizer struct{}

DefaultTokenizer implements a simple tokenizer that classifies characters the way a Unix shell does.

func (*DefaultTokenizer) IsEscape

func (t *DefaultTokenizer) IsEscape(r rune) bool

func (*DefaultTokenizer) IsEscapedQuote

func (t *DefaultTokenizer) IsEscapedQuote(r rune) bool

func (*DefaultTokenizer) IsQuote

func (t *DefaultTokenizer) IsQuote(r rune) bool

func (*DefaultTokenizer) IsWhitespace

func (t *DefaultTokenizer) IsWhitespace(r rune) bool

func (*DefaultTokenizer) IsWord

func (t *DefaultTokenizer) IsWord(r rune) bool

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer represents a lexical analyzer.

func NewLexer

func NewLexer(r io.Reader, posix, whitespacesplit bool) *Lexer

NewLexer creates a new Lexer reading from io.Reader. This Lexer has a DefaultTokenizer according to posix and whitespacesplit rules.

func NewLexerString

func NewLexerString(s string, posix, whitespacesplit bool) *Lexer

NewLexerString creates a new Lexer reading from a string. This Lexer has a DefaultTokenizer according to posix and whitespacesplit rules.

func (*Lexer) SetTokenizer

func (l *Lexer) SetTokenizer(t Tokenizer)

SetTokenizer sets a Tokenizer.

func (*Lexer) Split

func (l *Lexer) Split() ([]string, error)

type Tokenizer

type Tokenizer interface {
	IsWord(rune) bool
	IsWhitespace(rune) bool
	IsQuote(rune) bool
	IsEscape(rune) bool
	IsEscapedQuote(rune) bool
}

Tokenizer is the interface that classifies a rune as part of a word, whitespace, a quotation, an escape, or an escaped quotation.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL