syntax

package
v0.0.0-...-535c093
Published: Jul 4, 2020 License: MIT Imports: 8 Imported by: 2

Documentation

Overview

Package syntax provides lexing and parsing of GoCaml source code into an abstract syntax tree.
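As a minimal sketch of typical use (the GoCaml source literal is illustrative, and it assumes locerr.NewDummySource for building an in-memory source; see the Parse example below for reading from a file):

src := locerr.NewDummySource("let x = 1 in print_int x")

// Parse() lexes and parses the source in one step and returns the root of the AST.
tree, err := Parse(src)
if err != nil {
	panic(err)
}
fmt.Printf("AST: %v\n", tree)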

Index

Examples

Constants

This section is empty.

Variables

This section is empty.

Functions

func Parse

func Parse(src *locerr.Source) (*ast.AST, error)
Example
file := filepath.FromSlash("../testdata/from-mincaml/ack.ml")
src, err := locerr.NewSourceFromFile(file)
if err != nil {
	// File not found
	panic(err)
}

// Create lexer instance for the source
lex := NewLexer(src)
go lex.Lex()

// ParseTokens() takes a channel of tokens, which is usually fed by the lexer,
// and returns the root of the AST.
tree, err := ParseTokens(lex.Tokens)
if err != nil {
	// Parsing failed
	panic(err)
}

fmt.Printf("AST: %v\n", tree)

// To parse source code into an AST directly, simply call the Parse() function.
tree, err = Parse(src)
if err != nil {
	// Lexing or parsing failed
	panic(err)
}

fmt.Printf("AST: %v\n", tree)
Output:

func ParseTokens

func ParseTokens(tokens chan token.Token) (*ast.AST, error)

ParseTokens parses the given tokens and returns the parsed AST. Tokens are passed via a channel.

Types

type Lexer

type Lexer struct {
	Tokens chan token.Token

	// Error is called when an error occurs while lexing.
	// By default it outputs the error to stderr.
	Error func(msg string, pos locerr.Pos)
	// contains filtered or unexported fields
}

Lexer is a lexer instance which holds lexing state.
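The Error callback can be replaced before calling Lex to customize how lexing errors are reported. A minimal sketch (the broken source literal and the handler body are illustrative, and it assumes locerr.NewDummySource for an in-memory source):

src := locerr.NewDummySource("let x = ? in x")

lex := NewLexer(src)

// Report lexing errors with our own handler instead of the default stderr output.
lex.Error = func(msg string, pos locerr.Pos) {
	fmt.Printf("lex error at %v: %s\n", pos, msg)
}

go lex.Lex()

tree, err := ParseTokens(lex.Tokens)
if err != nil {
	// Lexing or parsing failed
	return
}
fmt.Printf("AST: %v\n", tree)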

func NewLexer

func NewLexer(src *locerr.Source) *Lexer

NewLexer creates a new Lexer instance.

func (*Lexer) Lex

func (l *Lexer) Lex()

Lex starts lexing. Lexed tokens are queued into the lexer's Tokens channel.

Example
file := filepath.FromSlash("../testdata/from-mincaml/ack.ml")
src, err := locerr.NewSourceFromFile(file)
if err != nil {
	// File not found
	panic(err)
}

lex := NewLexer(src)

// Start lexing the source in another goroutine
go lex.Lex()

// Tokens are sent on the lex.Tokens channel
for {
	tok := <-lex.Tokens
	switch tok.Kind {
	case token.ILLEGAL:
		fmt.Printf("Lexing invalid token at %v\n", tok.Start)
		return
	case token.EOF:
		fmt.Println("End of input")
		return
	default:
		fmt.Printf("Token: %s\n", tok.String())
	}
}
Output:
