Clean up TODOs, Fix lexer
parent a7007eaf0f
commit e7cdf0c929
@@ -358,7 +358,6 @@ func (c *Compiler) compileStatementWAT(stmt Statement, block *Block) (string, er
 			return "", err
 		}
 
-		// TODO: upcast to return type for non-primitive types
 		return wat + "return\n", nil
 	case Statement_DeclareLocalVariable:
 		dlv := stmt.Value.(DeclareLocalVariableStatement)
lexer.go (51 lines changed)
@@ -38,7 +38,7 @@ const (
 
 type Separator uint32
 
-var Separators []rune = []rune{'(', ')', '{', '}', '[', ']', ';', ',', '.'}
+var Separators []string = []string{"(", ")", "{", "}", "[", "]", ";", ",", "."}
 
 const (
 	Separator_OpenParen Separator = iota
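Note: with Separators as []string, a separator is matched as a string prefix of the remaining input (via strings.HasPrefix) rather than by single-rune equality, which also leaves room for multi-character separators. A minimal standalone sketch of that matching style, using simplified names that are not the project's own:

package main

import (
	"fmt"
	"strings"
)

// Stand-in separator table (assumed to mirror the commit's Separators slice).
var separators = []string{"(", ")", "{", "}", "[", "]", ";", ",", "."}

// matchSeparator reports which separator, if any, prefixes the input.
func matchSeparator(input string) (string, bool) {
	for _, sep := range separators {
		if strings.HasPrefix(input, sep) {
			return sep, true
		}
	}
	return "", false
}

func main() {
	fmt.Println(matchSeparator("(x + y)")) // "(" true
	fmt.Println(matchSeparator("x + y"))   // "" false
}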
@@ -145,6 +145,42 @@ func (l *Lexer) tryOperator() Operator {
 	return foundOp
 }
 
+func (l *Lexer) trySeparator() Separator {
+	var foundSep Separator = InvalidValue
+	var foundSepLen int = 0
+
+	str := string(l.Runes)
+	for i, separator := range Separators {
+		separatorLen := len([]rune(separator))
+		if separatorLen <= foundSepLen {
+			continue
+		}
+
+		if strings.HasPrefix(str, separator) {
+			foundSep = Separator(i)
+			foundSepLen = len([]rune(separator))
+		}
+	}
+
+	for i := 0; i < foundSepLen; i++ {
+		l.nextRune()
+	}
+
+	return foundSep
+}
+
+func (l *Lexer) hasNext(choices ...string) bool {
+	str := string(l.Runes)
+
+	for _, c := range choices {
+		if strings.HasPrefix(str, c) {
+			return true
+		}
+	}
+
+	return false
+}
+
 func (l *Lexer) nextRune() *rune {
 	if len(l.Runes) == 0 {
 		return nil
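The new trySeparator scans the whole Separators table, keeps the longest entry that prefixes the remaining input, and only then consumes that many runes; hasNext performs the same prefix test without consuming anything. A standalone sketch of the longest-prefix selection, with an illustrative table and plain return values instead of the lexer's own types:

package main

import (
	"fmt"
	"strings"
)

// Illustrative table; the multi-rune ".." entry shows why longest-match matters.
var separators = []string{".", "..", "(", ")"}

// longestSeparator returns the index of the longest separator prefixing input
// and its length in runes, or (-1, 0) if none matches.
func longestSeparator(input string) (idx, runeLen int) {
	idx, runeLen = -1, 0
	for i, sep := range separators {
		sepLen := len([]rune(sep))
		if sepLen <= runeLen {
			continue // already found a match at least this long
		}
		if strings.HasPrefix(input, sep) {
			idx, runeLen = i, sepLen
		}
	}
	return idx, runeLen
}

func main() {
	fmt.Println(longestSeparator("..rest")) // 1 2  (".." beats ".")
	fmt.Println(longestSeparator(".rest"))  // 0 1
	fmt.Println(longestSeparator("abc"))    // -1 0
}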
@@ -227,10 +263,15 @@ func (l *Lexer) nextToken() (*LexToken, error) {
 		return &LexToken{Type: Type_Operator, Position: TokenPosition{SourceFile: l.SourceFile, Position: l.LastTokenPosition}, Value: op}, nil
 	}
 
+	sep := l.trySeparator()
+	if sep != InvalidValue {
+		return &LexToken{Type: Type_Separator, Position: TokenPosition{SourceFile: l.SourceFile, Position: l.LastTokenPosition}, Value: sep}, nil
+	}
+
 	token := ""
 	for {
 		r := l.peekRune()
-		if r == nil || slices.Contains(Whitespace, *r) || slices.Contains(Separators, *r) {
+		if r == nil || slices.Contains(Whitespace, *r) || l.hasNext(Separators...) || l.hasNext(Operators...) {
 			break
 		}
 
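With trySeparator tried before the identifier loop, and the loop breaking as soon as the remaining input starts with any separator or operator, input such as foo(bar); now splits into foo, (, bar, ), ; even without whitespace between tokens. A rough standalone sketch of that splitting behaviour, using plain strings instead of the lexer's rune buffer and an operator table that is illustrative only:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

var separators = []string{"(", ")", "{", "}", "[", "]", ";", ",", "."}
var operators = []string{"+", "-", "*", "/", "="} // illustrative only

// hasPrefixAny mirrors hasNext: does the input start with any of the choices?
func hasPrefixAny(input string, choices []string) bool {
	for _, c := range choices {
		if strings.HasPrefix(input, c) {
			return true
		}
	}
	return false
}

// split emits separators/operators as their own tokens and accumulates
// everything else until whitespace, a separator, or an operator follows.
// ASCII-only simplification; the real lexer works on runes.
func split(input string) []string {
	var tokens []string
	for len(input) > 0 {
		switch {
		case unicode.IsSpace(rune(input[0])):
			input = input[1:]
		case hasPrefixAny(input, separators), hasPrefixAny(input, operators):
			tokens = append(tokens, input[:1])
			input = input[1:]
		default:
			end := 0
			for end < len(input) && !unicode.IsSpace(rune(input[end])) &&
				!hasPrefixAny(input[end:], separators) && !hasPrefixAny(input[end:], operators) {
				end++
			}
			tokens = append(tokens, input[:end])
			input = input[end:]
		}
	}
	return tokens
}

func main() {
	fmt.Println(split("foo(bar);")) // [foo ( bar ) ;]
}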
@@ -278,12 +319,6 @@ func (l *Lexer) nextToken() (*LexToken, error) {
 		return &LexToken{Type: Type_Literal, Position: TokenPosition{SourceFile: l.SourceFile, Position: l.LastTokenPosition}, Value: Literal{Type: Literal_Number, Primitive: numberType, Value: number}}, nil
 	}
 
-	if len(runes) == 1 {
-		if idx := slices.Index(Separators, runes[0]); idx != -1 {
-			return &LexToken{Type: Type_Separator, Position: TokenPosition{SourceFile: l.SourceFile, Position: l.LastTokenPosition}, Value: Separator(idx)}, nil
-		}
-	}
-
 	if idx := slices.Index(Keywords, token); idx != -1 {
 		return &LexToken{Type: Type_Keyword, Position: TokenPosition{SourceFile: l.SourceFile, Position: l.LastTokenPosition}, Value: Keyword(idx)}, nil
 	}