idea: implement psi parser and the start of symbol declarations

2023-09-11 22:43:34 -04:00
parent b64c7fb259
commit 7aa9d95221
21 changed files with 340 additions and 80 deletions

DiscardNodeAttribution.kt

@ -3,8 +3,8 @@ package gay.pizza.pork.parser
import gay.pizza.pork.ast.Node
object DiscardNodeAttribution : NodeAttribution {
-override fun enter() {}
override fun push(token: Token) {}
override fun <T : Node> adopt(node: T) {}
-override fun <T : Node> exit(node: T): T = node
+override fun <T : Node> guarded(block: () -> T): T =
+  block()
}
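
DiscardNodeAttribution keeps its no-op behaviour under the new interface shape. As a rough illustration of where it plugs in, assuming this sits in the same package and that the token source is built elsewhere (parseWithoutAttribution is a hypothetical helper, not part of this commit; Parser's constructor and readCompilationUnit() appear in the Parser.kt hunks below):

// CompilationUnit is assumed to live in gay.pizza.pork.ast, alongside Node.
import gay.pizza.pork.ast.CompilationUnit

// Hypothetical wiring: parse a compilation unit without collecting
// any token attribution data.
fun parseWithoutAttribution(tokens: PeekableSource<Token>): CompilationUnit {
  val parser = Parser(tokens, DiscardNodeAttribution)
  return parser.readCompilationUnit()
}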

NodeAttribution.kt

@ -3,8 +3,7 @@ package gay.pizza.pork.parser
import gay.pizza.pork.ast.Node
interface NodeAttribution {
-fun enter()
fun push(token: Token)
fun <T: Node> adopt(node: T)
-fun <T: Node> exit(node: T): T
+fun <T: Node> guarded(block: () -> T): T
}
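
With enter() and exit() folded into a single guarded() hook, an attribution implementation now wraps the entire node-producing block. A minimal sketch of a custom implementation under that contract, assuming it lives in the same package as the interface (LoggingNodeAttribution is a hypothetical name, not part of this commit):

import gay.pizza.pork.ast.Node

// Hypothetical implementation: logs consumed tokens and finished nodes.
class LoggingNodeAttribution : NodeAttribution {
  override fun push(token: Token) {
    // Called for each token the parser consumes while a rule is active.
    println("token: ${token.type} '${token.text}'")
  }

  override fun <T : Node> adopt(node: T) {}

  override fun <T : Node> guarded(block: () -> T): T {
    // The parser funnels every rule through here; block() builds the node.
    val node = block()
    println("parsed: $node")
    return node
  }
}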

ParseError.kt

@ -0,0 +1,3 @@
+package gay.pizza.pork.parser
+
+class ParseError(val error: String) : RuntimeException(error)
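
Because ParseError still extends RuntimeException, existing callers keep working, but parse failures can now be caught separately from other runtime errors. A rough sketch of that, with parseOrNull being a hypothetical helper rather than anything in this commit:

// Hypothetical helper: run a parse action and report ParseError distinctly.
fun <T> parseOrNull(block: () -> T): T? = try {
  block()
} catch (e: ParseError) {
  // e.error is the message handed to the constructor above.
  System.err.println("parse failed: ${e.error}")
  null
}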

Parser.kt

@ -185,7 +185,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
}
else -> {
-throw RuntimeException(
+throw ParseError(
"Failed to parse token: ${token.type} '${token.text}' as" +
" expression (index ${unsanitizedSource.currentIndex})"
)
@ -308,7 +308,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
return definition
}
val token = peek()
-throw RuntimeException(
+throw ParseError(
"Failed to parse token: ${token.type} '${token.text}' as" +
" definition (index ${unsanitizedSource.currentIndex})"
)
@ -318,7 +318,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
val token = peek()
return when (token.type) {
TokenType.Import -> readImportDeclaration()
-else -> throw RuntimeException(
+else -> throw ParseError(
"Failed to parse token: ${token.type} '${token.text}' as" +
" declaration (index ${unsanitizedSource.currentIndex})"
)
@ -343,7 +343,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
TokenType.GreaterEqual -> InfixOperator.GreaterEqual
TokenType.And -> InfixOperator.BooleanAnd
TokenType.Or -> InfixOperator.BooleanOr
-else -> throw RuntimeException("Unknown Infix Operator")
+else -> throw ParseError("Unknown Infix Operator")
}
private fun convertPrefixOperator(token: Token): PrefixOperator = when (token.type) {
@ -351,13 +351,13 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
TokenType.Plus -> PrefixOperator.UnaryPlus
TokenType.Minus -> PrefixOperator.UnaryMinus
TokenType.Tilde -> PrefixOperator.BinaryNot
-else -> throw RuntimeException("Unknown Prefix Operator")
+else -> throw ParseError("Unknown Prefix Operator")
}
private fun convertSuffixOperator(token: Token): SuffixOperator = when (token.type) {
TokenType.PlusPlus -> SuffixOperator.Increment
TokenType.MinusMinus -> SuffixOperator.Decrement
-else -> throw RuntimeException("Unknown Suffix Operator")
+else -> throw ParseError("Unknown Suffix Operator")
}
fun readCompilationUnit(): CompilationUnit = within {
@ -425,7 +425,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
private fun expect(vararg types: TokenType): Token {
val token = next()
if (!types.contains(token.type)) {
-throw RuntimeException(
+throw ParseError(
"Expected one of ${types.joinToString(", ")}" +
" but got type ${token.type} '${token.text}'"
)
@ -459,10 +459,7 @@ class Parser(source: PeekableSource<Token>, val attribution: NodeAttribution) {
}
}
-private fun <T: Node> within(block: () -> T): T {
-  attribution.enter()
-  return attribution.exit(block())
-}
+fun <T: Node> within(block: () -> T): T = attribution.guarded(block)
private fun ignoredByParser(type: TokenType): Boolean = when (type) {
TokenType.BlockComment -> true

ParserNodeAttribution.kt

@ -3,16 +3,10 @@ package gay.pizza.pork.parser
import gay.pizza.pork.ast.Node
import gay.pizza.pork.ast.data
-class ParserNodeAttribution : NodeAttribution {
+open class ParserNodeAttribution : NodeAttribution {
private val stack = mutableListOf<MutableList<Token>>()
private var current: MutableList<Token>? = null
-override fun enter() {
-  val store = mutableListOf<Token>()
-  current = store
-  stack.add(store)
-}
override fun push(token: Token) {
val store = current ?: throw RuntimeException("enter() not called!")
store.add(token)
@ -28,8 +22,12 @@ class ParserNodeAttribution : NodeAttribution {
}
}
-override fun <T: Node> exit(node: T): T {
-  val store = stack.removeLast()
+override fun <T : Node> guarded(block: () -> T): T {
+  var store = mutableListOf<Token>()
+  current = store
+  stack.add(store)
+  val node = block()
+  store = stack.removeLast()
current = stack.lastOrNull()
node.data = ParserAttributes(store)
return node
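
Making the class open suggests that specialised attributions can now layer extra behaviour over the token collection that guarded() performs. A minimal sketch of such a subclass, assuming the ParserAttributes plumbing shown above (TracingNodeAttribution is hypothetical, not part of this commit):

import gay.pizza.pork.ast.Node

// Hypothetical subclass: super.guarded() still collects tokens and
// attaches ParserAttributes; this just traces rule nesting around it.
class TracingNodeAttribution : ParserNodeAttribution() {
  private var depth = 0

  override fun <T : Node> guarded(block: () -> T): T {
    depth++
    try {
      val node = super.guarded(block)
      println("  ".repeat(depth) + "parsed " + node::class.simpleName)
      return node
    } finally {
      depth--
    }
  }
}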

Tokenizer.kt

@ -9,7 +9,9 @@ class Tokenizer(val source: CharSource) {
var endOfComment = false
while (true) {
val char = source.next()
-if (char == CharSource.NullChar) throw RuntimeException("Unterminated block comment")
+if (char == CharSource.NullChar) {
+  throw ParseError("Unterminated block comment")
+}
append(char)
if (endOfComment) {
@ -48,7 +50,7 @@ class Tokenizer(val source: CharSource) {
while (true) {
val char = source.peek()
if (char == CharSource.NullChar) {
throw RuntimeException("Unterminated string.")
throw ParseError("Unterminated string.")
}
append(source.next())
if (char == '"') {
@ -107,7 +109,7 @@ class Tokenizer(val source: CharSource) {
continue
}
} else {
throw RuntimeException("Unknown Char Consumer")
throw ParseError("Unknown Char Consumer")
}
val text = buildString {
@ -134,7 +136,7 @@ class Tokenizer(val source: CharSource) {
return readStringLiteral(char)
}
throw RuntimeException("Failed to parse: (${char}) next ${source.peek()}")
throw ParseError("Failed to parse: (${char}) next ${source.peek()}")
}
return Token.endOfFile(source.currentIndex)
}