Documentation ¶
Index ¶
- Constants
- func CountNewlines(value string) int
- func DescribeToken(token Token) string
- func DescribeTokenExpr(expr string) string
- type EnvLexerInformation
- type Failure
- type Lexer
- func (l *Lexer) Tokeniter(source string, name *string, filename *string, state *string) (ret []tokenRaw, err error)
- func (l *Lexer) Tokenize(source string, name *string, filename *string, state *string) (*TokenStream, error)
- func (l *Lexer) Wrap(stream []tokenRaw, name *string, filename *string) ([]Token, error)
- type OptionalLStrip
- type Token
- type TokenStream
- func (ts TokenStream) Bool() bool
- func (ts *TokenStream) Close()
- func (ts TokenStream) Current() Token
- func (ts TokenStream) Eos() bool
- func (ts *TokenStream) Expect(expr string) (*Token, error)
- func (ts TokenStream) Look() Token
- func (ts *TokenStream) Next() Token
- func (ts *TokenStream) NextIf(expr string) *Token
- func (ts *TokenStream) Skip(n int)
- func (ts *TokenStream) SkipIf(expr string) bool
Constants ¶
View Source
const TokenAdd = "add"
View Source
const TokenAssign = "assign"
View Source
const TokenBlockBegin = "block_begin"
View Source
const TokenBlockEnd = "block_end"
View Source
const TokenColon = "colon"
View Source
const TokenComma = "comma"
View Source
const TokenComment = "comment"
View Source
const TokenCommentBegin = "comment_begin"
View Source
const TokenCommentEnd = "comment_end"
View Source
const TokenData = "data"
View Source
const TokenDiv = "div"
View Source
const TokenDot = "dot"
View Source
const TokenEOF = "eof"
View Source
const TokenEq = "eq"
View Source
const TokenFloat = "float"
View Source
const TokenFloordiv = "floordiv"
View Source
const TokenGt = "gt"
View Source
const TokenGteq = "gteq"
View Source
const TokenInitial = "initial"
View Source
const TokenInteger = "integer"
View Source
const TokenLBrace = "lbrace"
View Source
const TokenLBracket = "lbracket"
View Source
const TokenLParen = "lparen"
View Source
const TokenLinecomment = "linecomment"
View Source
const TokenLinecommentBegin = "linecomment_begin"
View Source
const TokenLinecommentEnd = "linecomment_end"
View Source
const TokenLinestatementBegin = "linestatement_begin"
View Source
const TokenLinestatementEnd = "linestatement_end"
View Source
const TokenLt = "lt"
View Source
const TokenLteq = "lteq"
View Source
const TokenMod = "mod"
View Source
const TokenMul = "mul"
View Source
const TokenName = "name"
View Source
const TokenNe = "ne"
View Source
const TokenOperator = "operator"
View Source
const TokenPipe = "pipe"
View Source
const TokenPow = "pow"
View Source
const TokenRBrace = "rbrace"
View Source
const TokenRBracket = "rbracket"
View Source
const TokenRParen = "rparen"
View Source
const TokenRawBegin = "raw_begin"
View Source
const TokenRawEnd = "raw_end"
View Source
const TokenSemicolon = "semicolon"
View Source
const TokenString = "string"
View Source
const TokenSub = "sub"
View Source
const TokenTilde = "tilde"
View Source
const TokenVariableBegin = "variable_begin"
View Source
const TokenVariableEnd = "variable_end"
View Source
const TokenWhitespace = "whitespace"
Variables ¶
This section is empty.
Functions ¶
func CountNewlines ¶
func DescribeToken ¶
func DescribeTokenExpr ¶
Types ¶
type EnvLexerInformation ¶
type EnvLexerInformation struct {
	BlockStartString    string
	BlockEndString      string
	VariableStartString string
	VariableEndString   string
	CommentStartString  string
	CommentEndString    string
	LineStatementPrefix *string
	LineCommentPrefix   *string
	TrimBlocks          bool
	LStripBlocks        bool
	NewlineSequence     string
	KeepTrailingNewline bool
}
func DefaultEnvLexerInformation ¶
func DefaultEnvLexerInformation() *EnvLexerInformation
type Failure ¶
type Failure struct {
// contains filtered or unexported fields
}
Failure is used by the `Lexer` to specify known errors.
type Lexer ¶
type Lexer struct {
// contains filtered or unexported fields
}
Lexer is a struct that implements a lexer for a given environment. It is created automatically by the environment; usually you do not have to create one yourself.
Note that the lexer is not automatically bound to an environment. Multiple environments can share the same lexer.
func GetLexer ¶
func GetLexer(env *EnvLexerInformation) *Lexer
func New ¶
func New(env *EnvLexerInformation) *Lexer
func (*Lexer) Tokeniter ¶
func (l *Lexer) Tokeniter(source string, name *string, filename *string, state *string) (ret []tokenRaw, err error)
Tokeniter tokenizes the text and returns the tokens. Use this method if you just want to tokenize a template.
type OptionalLStrip ¶
type OptionalLStrip struct {
// contains filtered or unexported fields
}
OptionalLStrip is used for marking a point in the state that can have lstrip applied.
type TokenStream ¶
type TokenStream struct {
// contains filtered or unexported fields
}
func NewTokenStream ¶
func NewTokenStream(tokens []Token, name *string, filename *string) *TokenStream
func (TokenStream) Bool ¶
func (ts TokenStream) Bool() bool
func (*TokenStream) Close ¶
func (ts *TokenStream) Close()
func (TokenStream) Current ¶
func (ts TokenStream) Current() Token
func (TokenStream) Eos ¶
func (ts TokenStream) Eos() bool
func (TokenStream) Look ¶
func (ts TokenStream) Look() Token
func (*TokenStream) Next ¶
func (ts *TokenStream) Next() Token
func (*TokenStream) NextIf ¶
func (ts *TokenStream) NextIf(expr string) *Token
func (*TokenStream) Skip ¶
func (ts *TokenStream) Skip(n int)
func (*TokenStream) SkipIf ¶
func (ts *TokenStream) SkipIf(expr string) bool
Click to show internal directories.
Click to hide internal directories.