c6: github.com/c9s/c6/lexer

package lexer

import "github.com/c9s/c6/lexer"

Package Files

lexer.go lexer_at_rule.go lexer_comment.go lexer_expr.go lexer_func.go lexer_identifier.go lexer_interpolation.go lexer_property.go lexer_selector.go lexer_start.go lexer_state.go lexer_string.go lexer_var.go test_utils.go

Constants

const DIGITS = "1234567890"
const EOF = -1
const LETTERS = "zxcvbnmasdfghjklqwertyuiop"
const TOKEN_CHANNEL_BUFFER = 1024

func AssertLexerTokenSequence

func AssertLexerTokenSequence(t *testing.T, scss string, tokenList []ast.TokenType)
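
A minimal sketch of how this test helper might be used; the ast.T_* token
type names below are illustrative assumptions, not taken from this page:

    import (
        "testing"

        "github.com/c9s/c6/ast"
        "github.com/c9s/c6/lexer"
    )

    func TestLexClassSelector(t *testing.T) {
        lexer.AssertLexerTokenSequence(t, `.foo {  }`, []ast.TokenType{
            ast.T_CLASS_SELECTOR, // hypothetical token type name
            ast.T_BRACE_START,    // hypothetical token type name
            ast.T_BRACE_END,      // hypothetical token type name
        })
    }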

func AssertLexerTokenSequenceFromState

func AssertLexerTokenSequenceFromState(t *testing.T, scss string, fn stateFn, tokenList []ast.TokenType)

func AssertTokenSequence

func AssertTokenSequence(t *testing.T, l *Lexer, tokenList []ast.TokenType) []ast.Token

func AssertTokenType

func AssertTokenType(t *testing.T, tokenType ast.TokenType, token *ast.Token)

func IsCombinatorToken

func IsCombinatorToken(r rune) bool

Note: does not test the space character ' '.
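
A short sketch; CSS combinators include '>' (child), '+' (adjacent sibling),
and '~' (general sibling), but exactly which runes this predicate accepts is
an assumption here:

    if lexer.IsCombinatorToken('>') { // presumably true for the child combinator
        // consume the combinator and emit a token
    }
    if !lexer.IsCombinatorToken(' ') { // the space combinator is not tested (note above)
        // handle the descendant combinator separately
    }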

func IsInterpolationStartToken

func IsInterpolationStartToken(r rune, r2 rune) bool
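
The caller presumably passes the current rune and one rune of lookahead; that
the pair checked is '#' followed by '{' comes from SCSS interpolation syntax
#{ ... }, an assumption rather than something stated on this page:

    if lexer.IsInterpolationStartToken('#', '{') {
        // start lexing an interpolation expression
    }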

func IsSelector

func IsSelector(t ast.TokenType) bool

func IsSelectorStopToken

func IsSelectorStopToken(r rune) bool

* Pass the peek() rune to check whether it is a selector stop token
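
Illustrative only, since peek() is unexported; a call site inside one of the
package's selector state functions might look like:

    if IsSelectorStopToken(l.peek()) { // l.peek() assumed: next rune without consuming it
        // stop scanning and emit the selector token collected so far
    }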

func OutputGreen

func OutputGreen(t *testing.T, msg string, args ...interface{})

func OutputRed

func OutputRed(t *testing.T, msg string, args ...interface{})

type Lexer

type Lexer struct {
    // lex input
    Input string

    // current buffer offset
    Offset int

    // the offset where token starts
    Start int

    // byte width of the current rune (a UTF-8 character may span more than one byte)
    // The width is updated by the `next()` method.
    // `backup()` uses Width to go back to the previous offset.
    Width int

    // After next() is called, the previous Width is backed up in
    // LastWidth
    LastWidth int

    // rollback offset for token
    RollbackOffset int

    // current lexer file
    File string

    // current lexer state
    State stateFn

    // current line number of the input
    Line int

    // character offset from the beginning of the line
    LineOffset int

    // the token output channel
    Output chan *ast.Token

    Tokens []ast.Token
}
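
A sketch of the bookkeeping these fields imply, following the common
hand-written Go lexer idiom; next() and backup() are unexported, so the
hypothetical helpers below model their plausible shape, not the actual
implementation:

    import (
        "unicode/utf8"

        "github.com/c9s/c6/lexer"
    )

    // nextRune models what next() plausibly does with Offset/Width/LastWidth.
    func nextRune(l *lexer.Lexer) rune {
        if l.Offset >= len(l.Input) {
            return lexer.EOF // EOF constant documented above
        }
        r, w := utf8.DecodeRuneInString(l.Input[l.Offset:])
        l.LastWidth = l.Width // saved so a retreat-then-advance can restore it
        l.Width = w
        l.Offset += w
        return r
    }

    // backupRune models backup(): retreat by exactly one rune using Width.
    // Restoring Width from LastWidth is an assumption.
    func backupRune(l *lexer.Lexer) {
        l.Offset -= l.Width
        l.Width = l.LastWidth
    }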

func NewLexerWithBytes

func NewLexerWithBytes(data []byte) *Lexer

* Create a lexer object from a byte slice

func NewLexerWithFile

func NewLexerWithFile(file string) (*Lexer, error)

* Create a lexer object from a file path

TODO: detect encoding here

func NewLexerWithString

func NewLexerWithString(body string) *Lexer

* Create a lexer object from a string
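
A minimal end-to-end sketch; that Run() populates the exported Tokens slice
is an assumption based on the fields documented above:

    import (
        "fmt"

        "github.com/c9s/c6/lexer"
    )

    func main() {
        l := lexer.NewLexerWithString(`.foo { color: red; }`)
        l.Run() // drive the state machine over the whole input
        for _, tok := range l.Tokens { // assumes Run() fills Tokens
            fmt.Printf("%+v\n", tok)
        }
        l.Close()
    }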

func (*Lexer) Close

func (l *Lexer) Close()

func (*Lexer) DispatchFn

func (l *Lexer) DispatchFn(fn stateFn) stateFn

func (*Lexer) Dump

func (l *Lexer) Dump()

func (*Lexer) Run

func (l *Lexer) Run()

func (*Lexer) RunFrom

func (l *Lexer) RunFrom(fn stateFn)

func (*Lexer) TokenStream

func (l *Lexer) TokenStream() ast.TokenStream
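
A stream-style variant of the example above; that ast.TokenStream is a
readable channel of *ast.Token (matching the Output field documented on the
Lexer struct) and that the lexer closes it when lexing ends are both
assumptions:

    l := lexer.NewLexerWithString(`.foo { }`)
    l.Run()
    for tok := range l.TokenStream() { // ranges until the channel is closed (assumed)
        fmt.Println(tok)
    }
    l.Close()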

Package lexer imports 8 packages and is imported by 2 packages. Updated 2017-10-13.