Documentation ¶
Index ¶
- Constants
- Variables
- func InList(s Token, test ...Token) bool
- func IsSymbol(s string) bool
- type Token
- func NewFloatToken(spelling string) Token
- func NewIdentifierToken(spelling string) Token
- func NewIntegerToken(spelling string) Token
- func NewReservedToken(spelling string) Token
- func NewSpecialToken(spelling string) Token
- func NewStringToken(spelling string) Token
- func NewToken(class TokenClass, spelling string) Token
- func NewTypeToken(spelling string) Token
- func NewValueToken(spelling string) Token
- func (t Token) Boolean() bool
- func (t Token) Float() float64
- func (t Token) Integer() int64
- func (t Token) IsClass(class TokenClass) bool
- func (t Token) IsIdentifier() bool
- func (t Token) IsName() bool
- func (t Token) IsReserved(includeExtensions bool) bool
- func (t Token) IsString() bool
- func (t Token) IsValue() bool
- func (t Token) Spelling() string
- func (t Token) String() string
- type TokenClass
- type Tokenizer
- func (t *Tokenizer) Advance(p int)
- func (t *Tokenizer) AnyNext(test ...Token) bool
- func (t *Tokenizer) AtEnd() bool
- func (t *Tokenizer) DumpTokens(before, after int)
- func (t *Tokenizer) GetLine(line int) string
- func (t *Tokenizer) GetSource() string
- func (t *Tokenizer) GetTokens(pos1, pos2 int, spacing bool) string
- func (t *Tokenizer) IsNext(test Token) bool
- func (t *Tokenizer) Mark() int
- func (t *Tokenizer) Next() Token
- func (t *Tokenizer) NextText() string
- func (t *Tokenizer) Peek(offset int) Token
- func (t *Tokenizer) PeekText(offset int) string
- func (t *Tokenizer) Remainder() string
- func (t *Tokenizer) Reset()
- func (t *Tokenizer) Set(mark int)
- func (t *Tokenizer) SetLineNumber(line int) error
Constants ¶
const ToTheEnd = 999999
ToTheEnd means to advance the token stream to the end.
Variables ¶
var ( // "assert" token. AssertToken = NewReservedToken("assert") // "bool" token. BoolToken = NewTypeToken("bool") // "{" token. BlockBeginToken = NewSpecialToken("{") // "}" token. BlockEndToken = NewSpecialToken("}") // "break" token. BreakToken = NewReservedToken("break") // "byte" token. ByteToken = NewTypeToken("byte") // "call" token. CallToken = NewReservedToken("call") // "case" token. CaseToken = NewIdentifierToken("case") // "catch" token. CatchToken = NewReservedToken("catch") // "chan" token. ChanToken = NewTypeToken("chan") // "clear" token. ClearToken = NewIdentifierToken("clear") // "const" token. ConstToken = NewReservedToken("const") // "continue" token. ContinueToken = NewReservedToken("continue") // "{" token. DataBeginToken = NewSpecialToken("{") // "}" token. DataEndToken = NewSpecialToken("}") // "default" token. DefaultToken = NewIdentifierToken("default") // "defer" token. DeferToken = NewReservedToken("defer") // "@" token. DirectiveToken = NewSpecialToken("@") // "else" token. ElseToken = NewReservedToken("else") // "{}" token. EmptyBlockToken = NewSpecialToken("{}") // "{}" token. EmptyInitializerToken = NewSpecialToken("{}") // "interface{}" token. EmptyInterfaceToken = NewTypeToken("interface{}") // "error" token. ErrorToken = NewIdentifierToken("error") // "exit" token. ExitToken = NewReservedToken("exit") // "fallthrough" token. FallthroughToken = NewReservedToken("fallthrough") // "float32" token. Float32Token = NewTypeToken("float32") // "float64" token. Float64Token = NewTypeToken("float64") // "for" token. ForToken = NewReservedToken("for") // "func" token. FuncToken = NewReservedToken("func") // "go" token. GoToken = NewReservedToken("go") // "if" token. IfToken = NewReservedToken("if") // "int" token. IntToken = NewTypeToken("int") // "int32" token. Int32Token = NewTypeToken("int32") // "int64" token. Int64Token = NewTypeToken("int64") // "interface" token. InterfaceToken = NewIdentifierToken("interface") // "import" token. 
ImportToken = NewReservedToken("import") // "make" token. MakeToken = NewReservedToken("make") // "map" token. MapToken = NewTypeToken("map") // "nil" token. NilToken = NewReservedToken("nil") // "package" token. PackageToken = NewReservedToken("package") // "panic" token. PanicToken = NewReservedToken("panic") // "print" token. PrintToken = NewReservedToken("print") // "range" token. RangeToken = NewIdentifierToken("range") // "return" token. ReturnToken = NewReservedToken("return") // "string" token. StringToken = NewTypeToken("string") // "struct" token. StructToken = NewTypeToken("struct") // "switch" token. SwitchToken = NewReservedToken("switch") // "test" token. TestToken = NewIdentifierToken("test") // "type" token. TypeToken = NewReservedToken("type") // "try" token. TryToken = NewReservedToken("try") // "var" token. VarToken = NewReservedToken("var") // "when" token. WhenToken = NewIdentifierToken("when") // ";" token. SemicolonToken = NewSpecialToken(";") // ":" token. ColonToken = NewSpecialToken(":") // ":=" token. DefineToken = NewSpecialToken(":=") // "=" token. AssignToken = NewSpecialToken("=") // "," token. CommaToken = NewSpecialToken(",") // "==" token. EqualsToken = NewSpecialToken("==") // ">" token. GreaterThanToken = NewSpecialToken(">") // ">=" token. GreaterThanOrEqualsToken = NewSpecialToken(">=") // "<" token. LessThanToken = NewSpecialToken("<") // "<=" token. LessThanOrEqualsToken = NewSpecialToken("<=") // "<<" token. ShiftLeftToken = NewSpecialToken("<<") // ">>" token. ShiftRightToken = NewSpecialToken(">>") // "!" token. NotToken = NewSpecialToken("!") // "!=" token. NotEqualsToken = NewSpecialToken("!=") // "%" token. ModuloToken = NewSpecialToken("%") // "^" token. ExponentToken = NewSpecialToken("^") // "+" token. AddToken = NewSpecialToken("+") // "-" token. SubtractToken = NewSpecialToken("-") // "*" token. MultiplyToken = NewSpecialToken("*") // "/" token. DivideToken = NewSpecialToken("/") // "*" token. 
PointerToken = NewSpecialToken("*") // "&" token. AddressToken = NewSpecialToken("&") // "&" token. AndToken = NewSpecialToken("&") // "|" token. OrToken = NewSpecialToken("|") // "&&" token. BooleanAndToken = NewSpecialToken("&&") // "||" token. BooleanOrToken = NewSpecialToken("||") // "+=" token. AddAssignToken = NewSpecialToken("+=") // "-=" token. SubtractAssignToken = NewSpecialToken("-=") // "*=" token. MultiplyAssignToken = NewSpecialToken("*=") // "/=" token. DivideAssignToken = NewSpecialToken("/=") // "++" token. IncrementToken = NewSpecialToken("++") // "--" token. DecrementToken = NewSpecialToken("--") // "." token. DotToken = NewSpecialToken(".") // "..." token. VariadicToken = NewSpecialToken("...") // "<-" token. ChannelReceiveToken = NewSpecialToken("<-") // "(" token. StartOfListToken = NewSpecialToken("(") // ")" token. EndOfListToken = NewSpecialToken(")") // "[" token. StartOfArrayToken = NewSpecialToken("[") // "]" token. EndOfArrayToken = NewSpecialToken("]") // "?" token. OptionalToken = NewSpecialToken("?") // Empty token. EmptyToken = NewSpecialToken("") // "-" token. NegateToken = NewSpecialToken("-") )
Symbolic names for each string token value.
var EndOfTokens = Token{/* contains filtered or unexported fields */}
EndOfTokens is a reserved token that means end of the buffer was reached.
var ExtendedReservedWords = map[Token]bool{ CallToken: true, CatchToken: true, PrintToken: true, TryToken: true, ExitToken: true, PanicToken: true, }
ExtendedReservedWords are additional reserved words when running with language extensions enabled.
var ReservedWords = map[Token]bool{ BoolToken: true, BreakToken: true, ByteToken: true, ChanToken: true, ConstToken: true, ContinueToken: true, DeferToken: true, ElseToken: true, FallthroughToken: true, Float32Token: true, Float64Token: true, ForToken: true, FuncToken: true, GoToken: true, IfToken: true, ImportToken: true, InterfaceToken: true, IntToken: true, Int32Token: true, Int64Token: true, MapToken: true, NilToken: true, PackageToken: true, ReturnToken: true, SwitchToken: true, StringToken: true, StructToken: true, TypeToken: true, VarToken: true, }
ReservedWords is the list of reserved words in the _Ego_ language.
var SpecialTokens = map[Token]bool{ BlockBeginToken: true, BlockEndToken: true, DataBeginToken: true, DataEndToken: true, DirectiveToken: true, EmptyBlockToken: true, EmptyInitializerToken: true, SemicolonToken: true, ColonToken: true, DefineToken: true, AssignToken: true, CommaToken: true, EqualsToken: true, GreaterThanToken: true, GreaterThanOrEqualsToken: true, LessThanToken: true, LessThanOrEqualsToken: true, ShiftLeftToken: true, ShiftRightToken: true, NotToken: true, NotEqualsToken: true, ModuloToken: true, ExponentToken: true, AddToken: true, SubtractToken: true, MultiplyToken: true, DivideToken: true, PointerToken: true, AddressToken: true, AndToken: true, OrToken: true, BooleanAndToken: true, BooleanOrToken: true, AddAssignToken: true, SubtractAssignToken: true, MultiplyAssignToken: true, DivideAssignToken: true, IncrementToken: true, DecrementToken: true, DotToken: true, VariadicToken: true, ChannelReceiveToken: true, StartOfListToken: true, EndOfListToken: true, StartOfArrayToken: true, EndOfArrayToken: true, OptionalToken: true, EmptyToken: true, NegateToken: true, }
SpecialTokens is a list of tokens that are considered special semantic characters.
var TypeTokens = map[Token]bool{ BoolToken: true, ByteToken: true, IntToken: true, Int32Token: true, Int64Token: true, Float32Token: true, Float64Token: true, StringToken: true, StructToken: true, MapToken: true, }
TypeTokens is a list of tokens that represent type names.
Functions ¶
Types ¶
type Token ¶
type Token struct {
// contains filtered or unexported fields
}
Token defines a single token from the lexical scanning operation.
func NewFloatToken ¶
func NewIdentifierToken ¶
func NewIntegerToken ¶
func NewReservedToken ¶
func NewSpecialToken ¶
func NewStringToken ¶
func NewToken ¶
func NewToken(class TokenClass, spelling string) Token
func NewTypeToken ¶
func NewValueToken ¶
func (Token) IsClass ¶
func (t Token) IsClass(class TokenClass) bool
func (Token) IsIdentifier ¶
func (Token) IsReserved ¶
IsReserved indicates if a name is a reserved word.
type TokenClass ¶
type TokenClass int
const ( EndOfTokensClass TokenClass = iota IdentifierTokenClass TypeTokenClass StringTokenClass BooleanTokenClass IntegerTokenClass FloatTokenClass ReservedTokenClass SpecialTokenClass ValueTokenClass )
func (TokenClass) String ¶
func (c TokenClass) String() string
type Tokenizer ¶
Tokenizer is an instance of a tokenized string.
func New ¶
New creates a tokenizer instance and breaks the string up into an array of tokens. The isCode flag is used to indicate this is Ego code, which has some different tokenizing rules.
func (*Tokenizer) AnyNext ¶
AnyNext tests to see if the next token is in the given list of tokens, and if so advances and returns true, else does not advance and returns false.
func (*Tokenizer) DumpTokens ¶
func (*Tokenizer) GetLine ¶
GetLine returns a given line of text from the token stream. This actually refers to the original line splits done when the source was first received.
func (*Tokenizer) GetTokens ¶
GetTokens returns a string representing the tokens within the given range of tokens.
func (*Tokenizer) IsNext ¶
IsNext tests to see if the next token is the given token, and if so advances and returns true, else does not advance and returns false.
func (*Tokenizer) NextText ¶
NextText gets the next token in the tokenizer and returns its text value as a string.
func (*Tokenizer) Remainder ¶
Remainder returns the rest of the source, as initially presented to the tokenizer, from the current token position. This allows the caller to get "the rest" of a command line or other element as needed. If the token position is invalid (i.e. past end-of-tokens, for example) then an empty string is returned.
func (*Tokenizer) Reset ¶
func (t *Tokenizer) Reset()
Reset sets the tokenizer back to the start of the token stream.
func (*Tokenizer) SetLineNumber ¶
SetLineNumber resets line numbers. This is done after a prolog that the user might not be aware of is injected, so errors reported during compilation or runtime reflect line numbers based on the @line specification rather than the actual literal line number.