elysium/lexer.go

275 lines
5.5 KiB
Go
Raw Normal View History

2024-03-10 22:48:57 +01:00
package main
import (
"errors"
"slices"
2024-03-11 22:05:36 +01:00
"strconv"
2024-03-10 22:48:57 +01:00
"strings"
2024-03-11 22:05:36 +01:00
"unicode"
2024-03-10 22:48:57 +01:00
)
// Rune classes the tokenizer tests membership against.
var (
	Whitespace = []rune{' ', '\t', '\r', '\n'}
	Separators = []rune{'(', ')', '{', '}', ';', ','}
	Operators  = []rune{'=', '>', '<', '!', '+', '-', '*', '/', '%'}
)
2024-03-10 22:48:57 +01:00
// LexType is the category of a LexToken; it selects the concrete type
// stored in the token's Value field.
type LexType uint32

const (
	Type_Identifier LexType = iota // plain name (Value is a string)
	Type_Keyword                   // reserved word (Value is a Keyword)
	Type_Separator                 // punctuation (Value is a Separator)
	Type_Literal                   // constant (Value is a Literal)
)
// Keyword identifies one of the language's reserved words.
type Keyword uint32

const (
	Keyword_Import Keyword = iota // "import"
	Keyword_Void                  // "void"
)
// Separator identifies one of the single-rune punctuation tokens
// listed in the Separators rune class.
type Separator uint32

const (
	Separator_OpenParen  Separator = iota // '('
	Separator_CloseParen                  // ')'
	Separator_OpenCurly                   // '{'
	Separator_CloseCurly                  // '}'
	Separator_Semicolon                   // ';'
	Separator_Comma                       // ','
)
// LiteralType distinguishes the two kinds of literal constants.
type LiteralType uint32

const (
	Literal_String LiteralType = iota // quoted string constant
	Literal_Number                    // numeric constant
)
// LexToken is a single token of the input program as produced by the
// lexer. Type selects the concrete type held in Value: a string for
// identifiers, a Keyword, a Separator, or a Literal.
type LexToken struct {
	Type     LexType
	Position uint64 // lexer rune position recorded when the token was classified
	Value    any
}
// Literal is the Value payload of a Type_Literal LexToken.
type Literal struct {
	Type      LiteralType   // Literal_String or Literal_Number
	Primitive PrimitiveType // numeric type of Value; InvalidValue for string literals
	Value     any           // string, or int64/uint64/float64 as returned by strconv parsing
}
2024-03-13 17:17:09 +01:00
// Lexer is a cursor over the program's runes.
type Lexer struct {
	Runes    []rune // remaining, unconsumed input
	Position uint64 // number of runes consumed so far
}

// error builds an error annotated with the lexer's current rune position.
func (l *Lexer) error(message string) error {
	return errors.New(message + " (at " + strconv.FormatUint(l.Position, 10) + ")")
}

// peekRune returns a pointer to the next rune without consuming it,
// or nil at end of input.
func (l *Lexer) peekRune() *rune {
	if len(l.Runes) == 0 {
		return nil
	}
	return &l.Runes[0]
}

// nextRune consumes and returns the next rune (nil at end of input).
func (l *Lexer) nextRune() *rune {
	if len(l.Runes) == 0 {
		return nil
	}
	r := l.Runes[0]
	l.Runes = l.Runes[1:]
	l.Position++
	return &r
}

// stringLiteral consumes a double-quoted string literal, starting at the
// opening quote, and returns its decoded contents without the quotes.
// Recognized escapes are \n, \t, \r, \" and \\; any other escaped rune
// is kept verbatim (which is what \" and \\ reduce to anyway).
func (l *Lexer) stringLiteral() (string, error) {
	openQuote := l.nextRune()
	if openQuote == nil || *openQuote != '"' {
		return "", l.error("expected \"")
	}

	// Build the literal in a Builder: += on a string in a loop is quadratic.
	var literal strings.Builder
	for {
		r := l.nextRune()
		if r == nil {
			return "", l.error("unexpected end of file")
		}
		if *r == '"' {
			break
		}
		if *r == '\\' {
			escaped := l.nextRune()
			if escaped == nil {
				return "", l.error("unmatched escape sequence")
			}
			switch *escaped {
			case 'n':
				literal.WriteRune('\n')
			case 't':
				literal.WriteRune('\t')
			case 'r':
				literal.WriteRune('\r')
			default:
				// \" and \\ land here, as do unknown escapes, which are
				// passed through unchanged.
				literal.WriteRune(*escaped)
			}
			continue
		}
		literal.WriteRune(*r)
	}

	return literal.String(), nil
}
2024-03-13 17:17:09 +01:00
// TODO: maybe this method should directly return LexToken
func (l *Lexer) nextToken() (string, error) {
2024-03-10 22:48:57 +01:00
// Skip whitespace
2024-03-13 17:17:09 +01:00
for {
r := l.peekRune()
if r == nil {
return "", nil
}
if !slices.Contains(Whitespace, *r) {
break
}
l.nextRune()
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
r := l.peekRune()
if r == nil {
return "", nil
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
if *r == '"' {
literal, err := l.stringLiteral()
2024-03-10 22:48:57 +01:00
if err != nil {
2024-03-13 17:17:09 +01:00
return "", err
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
return "\"" + literal + "\"", nil
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
token := ""
for {
r := l.peekRune()
if r == nil || slices.Contains(Whitespace, *r) || slices.Contains(Separators, *r) {
break
}
token += string(*l.nextRune())
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
if len(token) == 0 && len(l.Runes) > 0 {
return string(*l.nextRune()), nil
2024-03-10 22:48:57 +01:00
}
2024-03-13 17:17:09 +01:00
return token, nil
2024-03-10 22:48:57 +01:00
}
2024-03-11 22:05:36 +01:00
// parseNumber parses raw (base 10) into the Go value matching
// numberType's class and bit width: int64, uint64 or float64.
// Panics on a type that is neither integer nor floating point.
func parseNumber(raw string, numberType PrimitiveType) (any, error) {
	switch {
	case isSignedInt(numberType):
		return strconv.ParseInt(raw, 10, getBits(numberType))
	case isUnsignedInt(numberType):
		return strconv.ParseUint(raw, 10, getBits(numberType))
	case isFloatingPoint(numberType):
		return strconv.ParseFloat(raw, getBits(numberType))
	default:
		panic("Unhandled type (" + strconv.FormatUint(uint64(numberType), 10) + ") in parseNumber()")
	}
}
2024-03-13 17:17:09 +01:00
func (l *Lexer) parseToken(token string) (*LexToken, error) {
2024-03-10 22:48:57 +01:00
if strings.HasPrefix(token, "\"") {
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Literal, Position: l.Position, Value: Literal{Type: Literal_String, Primitive: InvalidValue, Value: token[1 : len(token)-1]}}, nil
2024-03-11 22:05:36 +01:00
}
runes := []rune(token)
startsWithMinus := runes[0] == '-'
if startsWithMinus || unicode.IsDigit([]rune(token)[0]) {
// TODO: hexadecimal/binary/octal constants
var numberType PrimitiveType = InvalidValue
var rawNumber string = token
for i, name := range NumberTypeNames {
if strings.HasSuffix(token, name) {
numberType = PrimitiveType(i)
rawNumber = token[:len(token)-len(name)]
}
}
containsDot := slices.Contains(runes, '.')
if numberType == InvalidValue {
if containsDot {
numberType = Primitive_F64
} else if startsWithMinus {
numberType = Primitive_I64
} else {
numberType = Primitive_U64
}
}
if containsDot && !isFloatingPoint(numberType) {
return nil, errors.New("dot in non floating-point constant")
}
number, err := parseNumber(rawNumber, numberType)
if err != nil {
return nil, err
}
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Literal, Position: l.Position, Value: Literal{Type: Literal_Number, Primitive: numberType, Value: number}}, nil
2024-03-10 22:48:57 +01:00
}
switch token {
case "void":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Keyword, Position: l.Position, Value: Keyword_Void}, nil
2024-03-10 22:48:57 +01:00
case "import":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Keyword, Position: l.Position, Value: Keyword_Import}, nil
2024-03-10 22:48:57 +01:00
case "(":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_OpenParen}, nil
2024-03-10 22:48:57 +01:00
case ")":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_CloseParen}, nil
2024-03-10 22:48:57 +01:00
case "{":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_OpenCurly}, nil
2024-03-10 22:48:57 +01:00
case "}":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_CloseCurly}, nil
2024-03-10 22:48:57 +01:00
case ";":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_Semicolon}, nil
2024-03-11 22:05:36 +01:00
case ",":
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Separator, Position: l.Position, Value: Separator_Comma}, nil
2024-03-10 22:48:57 +01:00
default:
2024-03-13 17:17:09 +01:00
return &LexToken{Type: Type_Identifier, Position: l.Position, Value: token}, nil
2024-03-10 22:48:57 +01:00
}
}
func lexer(program string) ([]LexToken, error) {
var tokens []LexToken
2024-03-13 17:17:09 +01:00
lexer := Lexer{Runes: []rune(program)}
for {
token, err := lexer.nextToken()
2024-03-10 22:48:57 +01:00
if err != nil {
return nil, err
}
if len(token) == 0 {
break
}
2024-03-13 17:17:09 +01:00
lexToken, err := lexer.parseToken(token)
2024-03-10 22:48:57 +01:00
if err != nil {
return nil, err
}
2024-03-11 22:05:36 +01:00
tokens = append(tokens, *lexToken)
2024-03-10 22:48:57 +01:00
}
return tokens, nil
}